1 | /* Output Dwarf2 format symbol table information from GCC. |
2 | Copyright (C) 1992-2024 Free Software Foundation, Inc. |
3 | Contributed by Gary Funck (gary@intrepid.com). |
4 | Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com). |
5 | Extensively modified by Jason Merrill (jason@cygnus.com). |
6 | |
7 | This file is part of GCC. |
8 | |
9 | GCC is free software; you can redistribute it and/or modify it under |
10 | the terms of the GNU General Public License as published by the Free |
11 | Software Foundation; either version 3, or (at your option) any later |
12 | version. |
13 | |
14 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
15 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
16 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
17 | for more details. |
18 | |
19 | You should have received a copy of the GNU General Public License |
20 | along with GCC; see the file COPYING3. If not see |
21 | <http://www.gnu.org/licenses/>. */ |
22 | |
23 | /* TODO: Emit .debug_line header even when there are no functions, since |
24 | the file numbers are used by .debug_info. Alternately, leave |
25 | out locations for types and decls. |
26 | Avoid talking about ctors and op= for PODs. |
27 | Factor out common prologue sequences into multiple CIEs. */ |
28 | |
29 | /* The first part of this file deals with the DWARF 2 frame unwind |
30 | information, which is also used by the GCC efficient exception handling |
31 | mechanism. The second part, controlled only by an #ifdef |
32 | DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging |
33 | information. */ |
34 | |
35 | /* DWARF2 Abbreviation Glossary: |
36 | |
37 | CFA = Canonical Frame Address |
38 | a fixed address on the stack which identifies a call frame. |
39 | We define it to be the value of SP just before the call insn. |
40 | The CFA register and offset, which may change during the course |
41 | of the function, are used to calculate its value at runtime. |
42 | |
43 | CFI = Call Frame Instruction |
44 | an instruction for the DWARF2 abstract machine |
45 | |
46 | CIE = Common Information Entry |
	 information common to one or more FDEs
48 | |
49 | DIE = Debugging Information Entry |
50 | |
51 | FDE = Frame Description Entry |
52 | information describing the stack call frame, in particular, |
53 | how to restore registers |
54 | |
55 | DW_CFA_... = DWARF2 CFA call frame instruction |
56 | DW_TAG_... = DWARF2 DIE tag */ |
57 | |
58 | #include "config.h" |
59 | #include "system.h" |
60 | #include "coretypes.h" |
61 | #include "target.h" |
62 | #include "function.h" |
63 | #include "rtl.h" |
64 | #include "tree.h" |
65 | #include "memmodel.h" |
66 | #include "tm_p.h" |
67 | #include "stringpool.h" |
68 | #include "insn-config.h" |
69 | #include "ira.h" |
70 | #include "cgraph.h" |
71 | #include "diagnostic.h" |
72 | #include "fold-const.h" |
73 | #include "stor-layout.h" |
74 | #include "varasm.h" |
75 | #include "version.h" |
76 | #include "flags.h" |
77 | #include "rtlhash.h" |
78 | #include "reload.h" |
79 | #include "output.h" |
80 | #include "expr.h" |
81 | #include "dwarf2out.h" |
82 | #include "dwarf2ctf.h" |
83 | #include "dwarf2asm.h" |
84 | #include "toplev.h" |
85 | #include "md5.h" |
86 | #include "tree-pretty-print.h" |
87 | #include "print-rtl.h" |
88 | #include "debug.h" |
89 | #include "common/common-target.h" |
90 | #include "langhooks.h" |
91 | #include "lra.h" |
92 | #include "dumpfile.h" |
93 | #include "opts.h" |
94 | #include "tree-dfa.h" |
95 | #include "gdb/gdb-index.h" |
96 | #include "rtl-iter.h" |
97 | #include "stringpool.h" |
98 | #include "attribs.h" |
99 | #include "file-prefix-map.h" /* remap_debug_filename() */ |
100 | |
/* Emit a source line note; defined later in this file.  */
static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
				   int, bool);
/* Insn at which the last variable-location note was emitted.  */
static rtx_insn *last_var_location_insn;
/* Cached result of a next-real-insn scan, reused while emitting
   variable locations.  */
static rtx_insn *cached_next_real_insn;
/* Emit debugging information for a decl; defined later in this file.  */
static void dwarf2out_decl (tree);
/* Whether a typedef DIE would carry no extra information; defined later.  */
static bool is_redundant_typedef (const_tree);
107 | |
/* Default the XCOFF knobs off for non-XCOFF targets.  */
#ifndef XCOFF_DEBUGGING_INFO
#define XCOFF_DEBUGGING_INFO 0
#endif

/* BUG FIX: the original directive was "#define 0" -- the macro name was
   missing, which is an invalid preprocessor directive.  Supply the name
   being guarded by the #ifndef above.  */
#ifndef HAVE_XCOFF_DWARF_EXTRAS
#define HAVE_XCOFF_DWARF_EXTRAS 0
#endif
115 | |
#ifdef VMS_DEBUGGING_INFO
/* NOTE(review): presumably provided by the VMS host support code; returns
   file statistics used when emitting VMS-style file info -- confirm at
   the definition.  */
int vms_file_stats_name (const char *, long long *, long *, char *, int *);

/* Define this macro to be a nonzero value if the directory specifications
   which are output in the debug info should end with a separator.  */
#define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
/* Define this macro to evaluate to a nonzero value if GCC should refrain
   from generating indirect strings in DWARF2 debug information, for instance
   if your target is stuck with an old version of GDB that is unable to
   process them properly or uses VMS Debug.  */
#define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
#else
/* Non-VMS defaults: no trailing directory separator, indirect strings OK.  */
#define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
#define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
#endif
131 | |
/* ??? Poison these here until it can be done generically.  They've been
   totally replaced in this file; make sure it stays that way.  */
#undef DWARF2_UNWIND_INFO
#undef DWARF2_FRAME_INFO
#if (GCC_VERSION >= 3000)
#pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
#endif

/* The size of the target's pointer type.  */
#ifndef PTR_SIZE
#define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
#endif

/* Array of RTXes referenced by the debugging information, which therefore
   must be kept around forever.  */
static GTY(()) vec<rtx, va_gc> *used_rtx_array;

/* A pointer to the base of a list of incomplete types which might be
   completed at some later time.  incomplete_types_list needs to be a
   vec<tree, va_gc> *because we want to tell the garbage collector about
   it.  */
static GTY(()) vec<tree, va_gc> *incomplete_types;

/* Pointers to various DWARF2 sections.  */
static GTY(()) section *debug_info_section;
static GTY(()) section *debug_skeleton_info_section;
static GTY(()) section *debug_abbrev_section;
static GTY(()) section *debug_skeleton_abbrev_section;
static GTY(()) section *debug_aranges_section;
static GTY(()) section *debug_addr_section;
static GTY(()) section *debug_macinfo_section;
/* Name of the macro-info section actually selected for output.  */
static const char *debug_macinfo_section_name;
/* Base added to macinfo label numbers.  NOTE(review): its use is outside
   this chunk -- confirm at the uses.  */
static unsigned macinfo_label_base = 1;
static GTY(()) section *debug_line_section;
static GTY(()) section *debug_skeleton_line_section;
static GTY(()) section *debug_loc_section;
static GTY(()) section *debug_pubnames_section;
static GTY(()) section *debug_pubtypes_section;
static GTY(()) section *debug_str_section;
static GTY(()) section *debug_line_str_section;
static GTY(()) section *debug_str_dwo_section;
static GTY(()) section *debug_str_offsets_section;
static GTY(()) section *debug_ranges_section;
static GTY(()) section *debug_ranges_dwo_section;
static GTY(()) section *debug_frame_section;

/* Maximum size (in bytes) of an artificially generated label.  */
#define MAX_ARTIFICIAL_LABEL_BYTES	40

/* According to the (draft) DWARF 3 specification, the initial length
   should either be 4 or 12 bytes.  When it's 12 bytes, the first 4
   bytes are 0xffffffff, followed by the length stored in the next 8
   bytes.

   However, the SGI/MIPS ABI uses an initial length which is equal to
   dwarf_offset_size.  It is defined (elsewhere) accordingly.  */

#ifndef DWARF_INITIAL_LENGTH_SIZE
#define DWARF_INITIAL_LENGTH_SIZE (dwarf_offset_size == 4 ? 4 : 12)
#endif

#ifndef DWARF_INITIAL_LENGTH_SIZE_STR
#define DWARF_INITIAL_LENGTH_SIZE_STR (dwarf_offset_size == 4 ? "-4" : "-12")
#endif

/* Round SIZE up to the nearest BOUNDARY.  */
#define DWARF_ROUND(SIZE,BOUNDARY) \
  ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))

/* CIE identifier.  */
#if HOST_BITS_PER_WIDE_INT >= 64
#define DWARF_CIE_ID \
  (unsigned HOST_WIDE_INT) (dwarf_offset_size == 4 ? DW_CIE_ID : DW64_CIE_ID)
#else
#define DWARF_CIE_ID DW_CIE_ID
#endif


/* A vector for a table that contains frame description
   information for each routine.  */
/* Sentinel values for index fields: NOT_INDEXED presumably marks an
   entry that will never receive an index, NO_INDEX_ASSIGNED one whose
   index has not been assigned yet -- confirm at the uses.  */
#define NOT_INDEXED (-1U)
#define NO_INDEX_ASSIGNED (-2U)

static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
216 | |
/* An entry in a debug string table, together with the bookkeeping needed
   to decide how the string will ultimately be emitted.  */
struct GTY((for_user)) indirect_string_node {
  /* The string itself.  */
  const char *str;
  /* Number of references; the emission form is chosen late based on
     size and reference count (see the comment above
     skeleton_debug_str_hash below).  */
  unsigned int refcount;
  /* DWARF form chosen for this string (e.g. DW_FORM_strp).  */
  enum dwarf_form form;
  /* Label naming the string in its section, once one is generated.  */
  char *label;
  /* Slot in the string index table; presumably NOT_INDEXED or
     NO_INDEX_ASSIGNED until assigned -- confirm at the uses.  */
  unsigned int index;
};

/* Hasher keying indirect_string_node entries by their C string, for the
   GC-managed hash tables declared below.  */
struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
{
  typedef const char *compare_type;

  static hashval_t hash (indirect_string_node *);
  static bool equal (indirect_string_node *, const char *);
};
232 | |
/* Table of strings destined for the .debug_str section.  */
static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;

/* Table of strings destined for the .debug_line_str section.  */
static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;

/* With split_debug_info, both the comp_dir and dwo_name go in the
   main object file, rather than the dwo, similar to the force_direct
   parameter elsewhere but with additional complications:

   1) The string is needed in both the main object file and the dwo.
   That is, the comp_dir and dwo_name will appear in both places.

   2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
   DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.

   3) GCC chooses the form to use late, depending on the size and
   reference count.

   Rather than forcing the all debug string handling functions and
   callers to deal with these complications, simply use a separate,
   special-cased string table for any attribute that should go in the
   main object file.  This limits the complexity to just the places
   that need it.  */

static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;

/* Counter used when generating labels for indirect strings.  */
static GTY(()) int dw2_string_counter;

/* True if the compilation unit places functions in more than one section.  */
static GTY(()) bool have_multiple_function_sections = false;

/* The default cold text section.  */
static GTY(()) section *cold_text_section;

/* True if currently in text section.  */
static GTY(()) bool in_text_section_p = false;

/* Last debug-on location in corresponding section.  */
static GTY(()) const char *last_text_label;
static GTY(()) const char *last_cold_label;

/* Mark debug-on/off locations per section.
   NULL means the section is not used at all.  */
static GTY(()) vec<const char *, va_gc> *switch_text_ranges;
static GTY(()) vec<const char *, va_gc> *switch_cold_ranges;

/* The DIE for C++14 'auto' in a function return type.  */
static GTY(()) dw_die_ref auto_die;

/* The DIE for C++14 'decltype(auto)' in a function return type.  */
static GTY(()) dw_die_ref decltype_auto_die;

/* Forward declarations for functions defined in this file.  */

static void output_call_frame_info (int);

/* Personality decl of current unit.  Used only when assembler does not support
   personality CFI.  */
static GTY(()) rtx current_unit_personality;

/* Whether an eh_frame section is required.  */
static GTY(()) bool do_eh_frame = false;

/* .debug_rnglists next index.  */
static unsigned int rnglist_idx;

/* Data and reference forms for relocatable data.  */
#define DW_FORM_data (dwarf_offset_size == 8 ? DW_FORM_data8 : DW_FORM_data4)
#define DW_FORM_ref (dwarf_offset_size == 8 ? DW_FORM_ref8 : DW_FORM_ref4)

/* Section and label names used while emitting the frame and line-number
   tables.  The #ifndef-guarded ones may be overridden by the target.  */
#ifndef DEBUG_FRAME_SECTION
#define DEBUG_FRAME_SECTION	".debug_frame"
#endif

#ifndef FUNC_BEGIN_LABEL
#define FUNC_BEGIN_LABEL	"LFB"
#endif

#ifndef FUNC_SECOND_SECT_LABEL
#define FUNC_SECOND_SECT_LABEL	"LFSB"
#endif

#ifndef FUNC_END_LABEL
#define FUNC_END_LABEL		"LFE"
#endif

#ifndef PROLOGUE_END_LABEL
#define PROLOGUE_END_LABEL	"LPE"
#endif

#ifndef EPILOGUE_BEGIN_LABEL
#define EPILOGUE_BEGIN_LABEL	"LEB"
#endif

#ifndef FRAME_BEGIN_LABEL
#define FRAME_BEGIN_LABEL	"Lframe"
#endif
/* Labels bracketing the CIE, each FDE, and the line-number program.  */
#define CIE_AFTER_SIZE_LABEL	"LSCIE"
#define CIE_END_LABEL		"LECIE"
#define FDE_LABEL		"LSFDE"
#define FDE_AFTER_SIZE_LABEL	"LASFDE"
#define FDE_END_LABEL		"LEFDE"
#define LINE_NUMBER_BEGIN_LABEL	"LSLT"
#define LINE_NUMBER_END_LABEL	"LELT"
#define LN_PROLOG_AS_LABEL	"LASLTP"
#define LN_PROLOG_END_LABEL	"LELTP"
#define DIE_LABEL_PREFIX	"DW"
339 | |
340 | /* Match the base name of a file to the base name of a compilation unit. */ |
341 | |
342 | static bool |
343 | matches_main_base (const char *path) |
344 | { |
345 | /* Cache the last query. */ |
346 | static const char *last_path = NULL; |
347 | static bool last_match = false; |
348 | if (path != last_path) |
349 | { |
350 | const char *base; |
351 | int length = base_of_path (path, base_out: &base); |
352 | last_path = path; |
353 | last_match = (length == main_input_baselength |
354 | && memcmp (s1: base, main_input_basename, n: length) == 0); |
355 | } |
356 | return last_match; |
357 | } |
358 | |
#ifdef DEBUG_DEBUG_STRUCT

/* Debug aid: print one should_emit_struct_debug decision to stderr and
   return RESULT unchanged, so calls can wrap a return expression.  */

static bool
dump_struct_debug (tree type, enum debug_info_usage usage,
		   enum debug_struct_file criterion, int generic,
		   bool matches, bool result)
{
  /* Find the type name.  */
  tree type_decl = TYPE_STUB_DECL (type);
  tree t = type_decl;
  const char *name = 0;
  if (TREE_CODE (t) == TYPE_DECL)
    t = DECL_NAME (t);
  if (t)
    name = IDENTIFIER_POINTER (t);

  /* Fields: criterion, sys/usr header, base match, generic, usage
     (";" defn, "." direct use, "*" indirect use), result, decl, name.  */
  fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
	   criterion,
	   DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
	   matches ? "bas" : "hdr",
	   generic ? "gen" : "ord",
	   usage == DINFO_USAGE_DFN ? ";" :
	   usage == DINFO_USAGE_DIR_USE ? "." : "*",
	   result,
	   (void*) type_decl, name);
  return result;
}
/* When debugging, route decisions through the dumper; otherwise the
   macro just yields RESULT.  */
#define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
  dump_struct_debug (type, usage, criterion, generic, matches, result)

#else

#define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
  (result)

#endif
395 | |
396 | /* Get the number of HOST_WIDE_INTs needed to represent the precision |
397 | of the number. */ |
398 | |
399 | static unsigned int |
400 | get_full_len (const dw_wide_int &op) |
401 | { |
402 | return CEIL (op.get_precision (), HOST_BITS_PER_WIDE_INT); |
403 | } |
404 | |
405 | static bool |
406 | should_emit_struct_debug (tree type, enum debug_info_usage usage) |
407 | { |
408 | if (debug_info_level <= DINFO_LEVEL_TERSE) |
409 | return false; |
410 | |
411 | enum debug_struct_file criterion; |
412 | tree type_decl; |
413 | bool generic = lang_hooks.types.generic_p (type); |
414 | |
415 | if (generic) |
416 | criterion = debug_struct_generic[usage]; |
417 | else |
418 | criterion = debug_struct_ordinary[usage]; |
419 | |
420 | if (criterion == DINFO_STRUCT_FILE_NONE) |
421 | return DUMP_GSTRUCT (type, usage, criterion, generic, false, false); |
422 | if (criterion == DINFO_STRUCT_FILE_ANY) |
423 | return DUMP_GSTRUCT (type, usage, criterion, generic, false, true); |
424 | |
425 | type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type)); |
426 | |
427 | if (type_decl != NULL) |
428 | { |
429 | if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl)) |
430 | return DUMP_GSTRUCT (type, usage, criterion, generic, false, true); |
431 | |
432 | if (matches_main_base (DECL_SOURCE_FILE (type_decl))) |
433 | return DUMP_GSTRUCT (type, usage, criterion, generic, true, true); |
434 | } |
435 | |
436 | return DUMP_GSTRUCT (type, usage, criterion, generic, false, false); |
437 | } |
438 | |
439 | /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section, |
440 | switch to the data section instead, and write out a synthetic start label |
441 | for collect2 the first time around. */ |
442 | |
static void
switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
{
  if (eh_frame_section == 0)
    {
      int flags;

      if (EH_TABLES_CAN_BE_READ_ONLY)
	{
	  int fde_encoding;
	  int per_encoding;
	  int lsda_encoding;

	  /* The section can stay read-only only when none of the
	     pointer encodings requires an absolute (or aligned, which
	     implies absolute) address that would need a runtime
	     relocation under PIC.  */
	  fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
						       /*global=*/0);
	  per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
						       /*global=*/1);
	  lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
						       /*global=*/0);
	  flags = ((! flag_pic
		    || ((fde_encoding & 0x70) != DW_EH_PE_absptr
			&& (fde_encoding & 0x70) != DW_EH_PE_aligned
			&& (per_encoding & 0x70) != DW_EH_PE_absptr
			&& (per_encoding & 0x70) != DW_EH_PE_aligned
			&& (lsda_encoding & 0x70) != DW_EH_PE_absptr
			&& (lsda_encoding & 0x70) != DW_EH_PE_aligned))
		   ? 0 : SECTION_WRITE);
	}
      else
	flags = SECTION_WRITE;

#ifdef EH_FRAME_SECTION_NAME
      eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
#else
      /* No dedicated section on this target: fall back to the
	 (read-only) data section.  */
      eh_frame_section = ((flags == SECTION_WRITE)
			  ? data_section : readonly_data_section);
#endif /* EH_FRAME_SECTION_NAME */
    }

  switch_to_section (eh_frame_section);

#ifdef EH_FRAME_THROUGH_COLLECT2
  /* We have no special eh_frame section.  Emit special labels to guide
     collect2.  */
  if (!back)
    {
      tree label = get_file_function_name ("F");
      ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
      targetm.asm_out.globalize_label (asm_out_file,
				       IDENTIFIER_POINTER (label));
      ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
    }
#endif
}
497 | |
498 | /* Switch [BACK] to the eh or debug frame table section, depending on |
499 | FOR_EH. */ |
500 | |
501 | static void |
502 | switch_to_frame_table_section (int for_eh, bool back) |
503 | { |
504 | if (for_eh) |
505 | switch_to_eh_frame_section (back); |
506 | else |
507 | { |
508 | if (!debug_frame_section) |
509 | debug_frame_section = get_section (DEBUG_FRAME_SECTION, |
510 | SECTION_DEBUG, NULL); |
511 | switch_to_section (debug_frame_section); |
512 | } |
513 | } |
514 | |
515 | /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */ |
516 | |
517 | enum dw_cfi_oprnd_type |
518 | dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi) |
519 | { |
520 | switch (cfi) |
521 | { |
522 | case DW_CFA_nop: |
523 | case DW_CFA_GNU_window_save: |
524 | case DW_CFA_remember_state: |
525 | case DW_CFA_restore_state: |
526 | return dw_cfi_oprnd_unused; |
527 | |
528 | case DW_CFA_set_loc: |
529 | case DW_CFA_advance_loc1: |
530 | case DW_CFA_advance_loc2: |
531 | case DW_CFA_advance_loc4: |
532 | case DW_CFA_MIPS_advance_loc8: |
533 | return dw_cfi_oprnd_addr; |
534 | |
535 | case DW_CFA_offset: |
536 | case DW_CFA_offset_extended: |
537 | case DW_CFA_def_cfa: |
538 | case DW_CFA_offset_extended_sf: |
539 | case DW_CFA_def_cfa_sf: |
540 | case DW_CFA_restore: |
541 | case DW_CFA_restore_extended: |
542 | case DW_CFA_undefined: |
543 | case DW_CFA_same_value: |
544 | case DW_CFA_def_cfa_register: |
545 | case DW_CFA_register: |
546 | case DW_CFA_expression: |
547 | case DW_CFA_val_expression: |
548 | return dw_cfi_oprnd_reg_num; |
549 | |
550 | case DW_CFA_def_cfa_offset: |
551 | case DW_CFA_GNU_args_size: |
552 | case DW_CFA_def_cfa_offset_sf: |
553 | return dw_cfi_oprnd_offset; |
554 | |
555 | case DW_CFA_def_cfa_expression: |
556 | return dw_cfi_oprnd_loc; |
557 | |
558 | default: |
559 | gcc_unreachable (); |
560 | } |
561 | } |
562 | |
563 | /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */ |
564 | |
565 | enum dw_cfi_oprnd_type |
566 | dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi) |
567 | { |
568 | switch (cfi) |
569 | { |
570 | case DW_CFA_def_cfa: |
571 | case DW_CFA_def_cfa_sf: |
572 | case DW_CFA_offset: |
573 | case DW_CFA_offset_extended_sf: |
574 | case DW_CFA_offset_extended: |
575 | return dw_cfi_oprnd_offset; |
576 | |
577 | case DW_CFA_register: |
578 | return dw_cfi_oprnd_reg_num; |
579 | |
580 | case DW_CFA_expression: |
581 | case DW_CFA_val_expression: |
582 | return dw_cfi_oprnd_loc; |
583 | |
584 | case DW_CFA_def_cfa_expression: |
585 | return dw_cfi_oprnd_cfa_loc; |
586 | |
587 | default: |
588 | return dw_cfi_oprnd_unused; |
589 | } |
590 | } |
591 | |
592 | /* Output one FDE. */ |
593 | |
594 | static void |
595 | output_fde (dw_fde_ref fde, bool for_eh, bool second, |
596 | char *section_start_label, int fde_encoding, char *augmentation, |
597 | bool any_lsda_needed, int lsda_encoding) |
598 | { |
599 | const char *begin, *end; |
600 | static unsigned int j; |
601 | char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES]; |
602 | |
603 | targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh, |
604 | /* empty */ 0); |
605 | targetm.asm_out.internal_label (asm_out_file, FDE_LABEL, |
606 | for_eh + j); |
607 | ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j); |
608 | ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j); |
609 | if (!XCOFF_DEBUGGING_INFO || for_eh) |
610 | { |
611 | if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4 && !for_eh) |
612 | dw2_asm_output_data (4, 0xffffffff, "Initial length escape value" |
613 | " indicating 64-bit DWARF extension" ); |
614 | dw2_asm_output_delta (for_eh ? 4 : dwarf_offset_size, l2, l1, |
615 | "FDE Length" ); |
616 | } |
617 | ASM_OUTPUT_LABEL (asm_out_file, l1); |
618 | |
619 | if (for_eh) |
620 | dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset" ); |
621 | else |
622 | dw2_asm_output_offset (dwarf_offset_size, section_start_label, |
623 | debug_frame_section, "FDE CIE offset" ); |
624 | |
625 | begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin; |
626 | end = second ? fde->dw_fde_second_end : fde->dw_fde_end; |
627 | |
628 | if (for_eh) |
629 | { |
630 | rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin); |
631 | SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL; |
632 | dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false, |
633 | "FDE initial location" ); |
634 | dw2_asm_output_delta (size_of_encoded_value (fde_encoding), |
635 | end, begin, "FDE address range" ); |
636 | } |
637 | else |
638 | { |
639 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location" ); |
640 | dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range" ); |
641 | } |
642 | |
643 | if (augmentation[0]) |
644 | { |
645 | if (any_lsda_needed) |
646 | { |
647 | int size = size_of_encoded_value (lsda_encoding); |
648 | |
649 | if (lsda_encoding == DW_EH_PE_aligned) |
650 | { |
651 | int offset = ( 4 /* Length */ |
652 | + 4 /* CIE offset */ |
653 | + 2 * size_of_encoded_value (fde_encoding) |
654 | + 1 /* Augmentation size */ ); |
655 | int pad = -offset & (PTR_SIZE - 1); |
656 | |
657 | size += pad; |
658 | gcc_assert (size_of_uleb128 (size) == 1); |
659 | } |
660 | |
661 | dw2_asm_output_data_uleb128 (size, "Augmentation size" ); |
662 | |
663 | if (fde->uses_eh_lsda) |
664 | { |
665 | ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA" , |
666 | fde->funcdef_number); |
667 | dw2_asm_output_encoded_addr_rtx (lsda_encoding, |
668 | gen_rtx_SYMBOL_REF (Pmode, l1), |
669 | false, |
670 | "Language Specific Data Area" ); |
671 | } |
672 | else |
673 | { |
674 | if (lsda_encoding == DW_EH_PE_aligned) |
675 | ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE)); |
676 | dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0, |
677 | "Language Specific Data Area (none)" ); |
678 | } |
679 | } |
680 | else |
681 | dw2_asm_output_data_uleb128 (0, "Augmentation size" ); |
682 | } |
683 | |
684 | /* Loop through the Call Frame Instructions associated with this FDE. */ |
685 | fde->dw_fde_current_label = begin; |
686 | { |
687 | size_t from, until, i; |
688 | |
689 | from = 0; |
690 | until = vec_safe_length (v: fde->dw_fde_cfi); |
691 | |
692 | if (fde->dw_fde_second_begin == NULL) |
693 | ; |
694 | else if (!second) |
695 | until = fde->dw_fde_switch_cfi_index; |
696 | else |
697 | from = fde->dw_fde_switch_cfi_index; |
698 | |
699 | for (i = from; i < until; i++) |
700 | output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh); |
701 | } |
702 | |
703 | /* If we are to emit a ref/link from function bodies to their frame tables, |
704 | do it now. This is typically performed to make sure that tables |
705 | associated with functions are dragged with them and not discarded in |
706 | garbage collecting links. We need to do this on a per function basis to |
707 | cope with -ffunction-sections. */ |
708 | |
709 | #ifdef ASM_OUTPUT_DWARF_TABLE_REF |
710 | /* Switch to the function section, emit the ref to the tables, and |
711 | switch *back* into the table section. */ |
712 | switch_to_section (function_section (fde->decl)); |
713 | ASM_OUTPUT_DWARF_TABLE_REF (section_start_label); |
714 | switch_to_frame_table_section (for_eh, true); |
715 | #endif |
716 | |
717 | /* Pad the FDE out to an address sized boundary. */ |
718 | ASM_OUTPUT_ALIGN (asm_out_file, |
719 | floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE))); |
720 | ASM_OUTPUT_LABEL (asm_out_file, l2); |
721 | |
722 | j += 2; |
723 | } |
724 | |
725 | /* Return true if frame description entry FDE is needed for EH. */ |
726 | |
727 | static bool |
728 | fde_needed_for_eh_p (dw_fde_ref fde) |
729 | { |
730 | if (flag_asynchronous_unwind_tables) |
731 | return true; |
732 | |
733 | if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl)) |
734 | return true; |
735 | |
736 | if (fde->uses_eh_lsda) |
737 | return true; |
738 | |
739 | /* If exceptions are enabled, we have collected nothrow info. */ |
740 | if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow)) |
741 | return false; |
742 | |
743 | return true; |
744 | } |
745 | |
746 | /* Output the call frame information used to record information |
747 | that relates to calculating the frame pointer, and records the |
748 | location of saved registers. */ |
749 | |
750 | static void |
751 | output_call_frame_info (int for_eh) |
752 | { |
753 | unsigned int i; |
754 | dw_fde_ref fde; |
755 | dw_cfi_ref cfi; |
756 | char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES]; |
757 | char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES]; |
758 | bool any_lsda_needed = false; |
759 | char augmentation[6]; |
760 | int augmentation_size; |
761 | int fde_encoding = DW_EH_PE_absptr; |
762 | int per_encoding = DW_EH_PE_absptr; |
763 | int lsda_encoding = DW_EH_PE_absptr; |
764 | int return_reg; |
765 | rtx personality = NULL; |
766 | int dw_cie_version; |
767 | |
768 | /* Don't emit a CIE if there won't be any FDEs. */ |
769 | if (!fde_vec) |
770 | return; |
771 | |
772 | /* Nothing to do if the assembler's doing it all. */ |
773 | if (dwarf2out_do_cfi_asm ()) |
774 | return; |
775 | |
776 | /* If we don't have any functions we'll want to unwind out of, don't emit |
777 | any EH unwind information. If we make FDEs linkonce, we may have to |
778 | emit an empty label for an FDE that wouldn't otherwise be emitted. We |
779 | want to avoid having an FDE kept around when the function it refers to |
780 | is discarded. Example where this matters: a primary function template |
781 | in C++ requires EH information, an explicit specialization doesn't. */ |
782 | if (for_eh) |
783 | { |
784 | bool any_eh_needed = false; |
785 | |
786 | FOR_EACH_VEC_ELT (*fde_vec, i, fde) |
787 | { |
788 | if (fde->uses_eh_lsda) |
789 | any_eh_needed = any_lsda_needed = true; |
790 | else if (fde_needed_for_eh_p (fde)) |
791 | any_eh_needed = true; |
792 | else if (TARGET_USES_WEAK_UNWIND_INFO) |
793 | targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1); |
794 | } |
795 | |
796 | if (!any_eh_needed) |
797 | return; |
798 | } |
799 | |
800 | /* We're going to be generating comments, so turn on app. */ |
801 | if (flag_debug_asm) |
802 | app_enable (); |
803 | |
804 | /* Switch to the proper frame section, first time. */ |
805 | switch_to_frame_table_section (for_eh, back: false); |
806 | |
807 | ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh); |
808 | ASM_OUTPUT_LABEL (asm_out_file, section_start_label); |
809 | |
810 | /* Output the CIE. */ |
811 | ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh); |
812 | ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh); |
813 | if (!XCOFF_DEBUGGING_INFO || for_eh) |
814 | { |
815 | if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4 && !for_eh) |
816 | dw2_asm_output_data (4, 0xffffffff, |
817 | "Initial length escape value indicating 64-bit DWARF extension" ); |
818 | dw2_asm_output_delta (for_eh ? 4 : dwarf_offset_size, l2, l1, |
819 | "Length of Common Information Entry" ); |
820 | } |
821 | ASM_OUTPUT_LABEL (asm_out_file, l1); |
822 | |
823 | /* Now that the CIE pointer is PC-relative for EH, |
824 | use 0 to identify the CIE. */ |
825 | dw2_asm_output_data ((for_eh ? 4 : dwarf_offset_size), |
826 | (for_eh ? 0 : DWARF_CIE_ID), |
827 | "CIE Identifier Tag" ); |
828 | |
829 | /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to |
830 | use CIE version 1, unless that would produce incorrect results |
831 | due to overflowing the return register column. */ |
832 | return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh); |
833 | dw_cie_version = 1; |
834 | if (return_reg >= 256 || dwarf_version > 2) |
835 | dw_cie_version = 3; |
836 | dw2_asm_output_data (1, dw_cie_version, "CIE Version" ); |
837 | |
838 | augmentation[0] = 0; |
839 | augmentation_size = 0; |
840 | |
841 | personality = current_unit_personality; |
842 | if (for_eh) |
843 | { |
844 | char *p; |
845 | |
846 | /* Augmentation: |
847 | z Indicates that a uleb128 is present to size the |
848 | augmentation section. |
849 | L Indicates the encoding (and thus presence) of |
850 | an LSDA pointer in the FDE augmentation. |
851 | R Indicates a non-default pointer encoding for |
852 | FDE code pointers. |
853 | P Indicates the presence of an encoding + language |
854 | personality routine in the CIE augmentation. */ |
855 | |
856 | fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0); |
857 | per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1); |
858 | lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0); |
859 | |
860 | p = augmentation + 1; |
861 | if (personality) |
862 | { |
863 | *p++ = 'P'; |
864 | augmentation_size += 1 + size_of_encoded_value (per_encoding); |
865 | assemble_external_libcall (personality); |
866 | } |
867 | if (any_lsda_needed) |
868 | { |
869 | *p++ = 'L'; |
870 | augmentation_size += 1; |
871 | } |
872 | if (fde_encoding != DW_EH_PE_absptr) |
873 | { |
874 | *p++ = 'R'; |
875 | augmentation_size += 1; |
876 | } |
877 | if (p > augmentation + 1) |
878 | { |
879 | augmentation[0] = 'z'; |
880 | *p = '\0'; |
881 | } |
882 | |
883 | /* Ug. Some platforms can't do unaligned dynamic relocations at all. */ |
884 | if (personality && per_encoding == DW_EH_PE_aligned) |
885 | { |
886 | int offset = ( 4 /* Length */ |
887 | + 4 /* CIE Id */ |
888 | + 1 /* CIE version */ |
889 | + strlen (s: augmentation) + 1 /* Augmentation */ |
890 | + size_of_uleb128 (1) /* Code alignment */ |
891 | + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT) |
892 | + 1 /* RA column */ |
893 | + 1 /* Augmentation size */ |
894 | + 1 /* Personality encoding */ ); |
895 | int pad = -offset & (PTR_SIZE - 1); |
896 | |
897 | augmentation_size += pad; |
898 | |
899 | /* Augmentations should be small, so there's scarce need to |
900 | iterate for a solution. Die if we exceed one uleb128 byte. */ |
901 | gcc_assert (size_of_uleb128 (augmentation_size) == 1); |
902 | } |
903 | } |
904 | |
905 | dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation" ); |
906 | if (dw_cie_version >= 4) |
907 | { |
908 | dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size" ); |
909 | dw2_asm_output_data (1, 0, "CIE Segment Size" ); |
910 | } |
911 | dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor" ); |
912 | dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT, |
913 | "CIE Data Alignment Factor" ); |
914 | |
915 | if (dw_cie_version == 1) |
916 | dw2_asm_output_data (1, return_reg, "CIE RA Column" ); |
917 | else |
918 | dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column" ); |
919 | |
920 | if (augmentation[0]) |
921 | { |
922 | dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size" ); |
923 | if (personality) |
924 | { |
925 | dw2_asm_output_data (1, per_encoding, "Personality (%s)" , |
926 | eh_data_format_name (per_encoding)); |
927 | dw2_asm_output_encoded_addr_rtx (per_encoding, |
928 | personality, |
929 | true, NULL); |
930 | } |
931 | |
932 | if (any_lsda_needed) |
933 | dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)" , |
934 | eh_data_format_name (lsda_encoding)); |
935 | |
936 | if (fde_encoding != DW_EH_PE_absptr) |
937 | dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)" , |
938 | eh_data_format_name (fde_encoding)); |
939 | } |
940 | |
941 | FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi) |
942 | output_cfi (cfi, NULL, for_eh); |
943 | |
944 | /* Pad the CIE out to an address sized boundary. */ |
945 | ASM_OUTPUT_ALIGN (asm_out_file, |
946 | floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)); |
947 | ASM_OUTPUT_LABEL (asm_out_file, l2); |
948 | |
949 | /* Loop through all of the FDE's. */ |
950 | FOR_EACH_VEC_ELT (*fde_vec, i, fde) |
951 | { |
952 | unsigned int k; |
953 | |
954 | /* Don't emit EH unwind info for leaf functions that don't need it. */ |
955 | if (for_eh && !fde_needed_for_eh_p (fde)) |
956 | continue; |
957 | |
958 | for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++) |
959 | output_fde (fde, for_eh, second: k, section_start_label, fde_encoding, |
960 | augmentation, any_lsda_needed, lsda_encoding); |
961 | } |
962 | |
963 | if (for_eh && targetm.terminate_dw2_eh_frame_info) |
964 | dw2_asm_output_data (4, 0, "End of Table" ); |
965 | |
966 | /* Turn off app to make assembly quicker. */ |
967 | if (flag_debug_asm) |
968 | app_disable (); |
969 | } |
970 | |
/* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed.
   SECOND is true when emitting the second part of a function that was
   split across text sections (it selects the LLSDAC label family
   instead of LLSDA).  The personality/LSDA directives are only emitted
   when the target uses DWARF2 EH unwind info.  */

static void
dwarf2out_do_cfi_startproc (bool second)
{
  int enc;
  rtx ref;

  fprintf (stream: asm_out_file, format: "\t.cfi_startproc\n" );

  /* Let the target emit any extra directives it wants right after
     .cfi_startproc.  */
  targetm.asm_out.post_cfi_startproc (asm_out_file, current_function_decl);

  /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
     eh unwinders.  */
  if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
    return;

  rtx personality = get_personality_function (current_function_decl);

  if (personality)
    {
      enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
      ref = personality;

      /* ??? The GAS support isn't entirely consistent.  We have to
	 handle indirect support ourselves, but PC-relative is done
	 in the assembler.  Further, the assembler can't handle any
	 of the weirder relocation types.  */
      if (enc & DW_EH_PE_indirect)
	{
	  if (targetm.asm_out.make_eh_symbol_indirect != NULL)
	    ref = targetm.asm_out.make_eh_symbol_indirect (ref, true);
	  else
	    ref = dw2_force_const_mem (ref, true);
	}

      fprintf (stream: asm_out_file, format: "\t.cfi_personality %#x," , enc);
      output_addr_const (asm_out_file, ref);
      fputc (c: '\n', stream: asm_out_file);
    }

  if (crtl->uses_eh_lsda)
    {
      char lab[MAX_ARTIFICIAL_LABEL_BYTES];

      enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
      ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA" ,
				   current_function_funcdef_no);
      /* The LSDA label is local to this function.  */
      ref = gen_rtx_SYMBOL_REF (Pmode, lab);
      SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;

      /* Same indirect-reference handling as for the personality above.  */
      if (enc & DW_EH_PE_indirect)
	{
	  if (targetm.asm_out.make_eh_symbol_indirect != NULL)
	    ref = targetm.asm_out.make_eh_symbol_indirect (ref, true);
	  else
	    ref = dw2_force_const_mem (ref, true);
	}

      fprintf (stream: asm_out_file, format: "\t.cfi_lsda %#x," , enc);
      output_addr_const (asm_out_file, ref);
      fputc (c: '\n', stream: asm_out_file);
    }
}
1035 | |
1036 | /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that |
1037 | this allocation may be done before pass_final. */ |
1038 | |
1039 | dw_fde_ref |
1040 | dwarf2out_alloc_current_fde (void) |
1041 | { |
1042 | dw_fde_ref fde; |
1043 | |
1044 | fde = ggc_cleared_alloc<dw_fde_node> (); |
1045 | fde->decl = current_function_decl; |
1046 | fde->funcdef_number = current_function_funcdef_no; |
1047 | fde->fde_index = vec_safe_length (v: fde_vec); |
1048 | fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls; |
1049 | fde->uses_eh_lsda = crtl->uses_eh_lsda; |
1050 | fde->nothrow = crtl->nothrow; |
1051 | fde->drap_reg = INVALID_REGNUM; |
1052 | fde->vdrap_reg = INVALID_REGNUM; |
1053 | |
1054 | /* Record the FDE associated with this function. */ |
1055 | cfun->fde = fde; |
1056 | vec_safe_push (v&: fde_vec, obj: fde); |
1057 | |
1058 | return fde; |
1059 | } |
1060 | |
/* Output a marker (i.e. a label) for the beginning of a function, before
   the prologue.  LINE, COLUMN and FILE give the function's first source
   location and are forwarded to the line table when dwarf2 debugging
   info is being emitted.  */

void
dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
			  unsigned int column ATTRIBUTE_UNUSED,
			  const char *file ATTRIBUTE_UNUSED)
{
  char label[MAX_ARTIFICIAL_LABEL_BYTES];
  char * dup_label;
  dw_fde_ref fde;
  section *fnsec;
  bool do_frame;

  current_function_func_begin_label = NULL;

  do_frame = dwarf2out_do_frame ();

  /* ??? current_function_func_begin_label is also used by except.cc for
     call-site information.  We must emit this label if it might be used.  */
  if (!do_frame
      && (!flag_exceptions
	  || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
    return;

  fnsec = function_section (current_function_decl);
  switch_to_section (fnsec);
  ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
			       current_function_funcdef_no);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
			  current_function_funcdef_no);
  /* The label outlives this stack frame, so keep a heap copy.  */
  dup_label = xstrdup (label);
  current_function_func_begin_label = dup_label;

  /* We can elide FDE allocation if we're not emitting frame unwind info.  */
  if (!do_frame)
    return;

  /* Unlike the debug version, the EH version of frame unwind info is a per-
     function setting so we need to record whether we need it for the unit.  */
  do_eh_frame |= dwarf2out_do_eh_frame ();

  /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
     emit insns as rtx but bypass the bulk of rest_of_compilation, which
     would include pass_dwarf2_frame.  If we've not created the FDE yet,
     do so now.  */
  fde = cfun->fde;
  if (fde == NULL)
    fde = dwarf2out_alloc_current_fde ();

  /* Initialize the bits of CURRENT_FDE that were not available earlier.  */
  fde->dw_fde_begin = dup_label;
  fde->dw_fde_current_label = dup_label;
  fde->in_std_section = (fnsec == text_section
			 || (cold_text_section && fnsec == cold_text_section));
  fde->ignored_debug = DECL_IGNORED_P (current_function_decl);
  in_text_section_p = fnsec == text_section;

  /* We only want to output line number information for the genuine dwarf2
     prologue case, not the eh frame case.  */
#ifdef DWARF2_DEBUGGING_INFO
  if (file)
    dwarf2out_source_line (line, column, file, 0, true);
#endif

  if (dwarf2out_do_cfi_asm ())
    dwarf2out_do_cfi_startproc (second: false);
  else
    {
      /* Without CFI asm support, only a single personality per unit can
	 be handled; remember the first one seen.  */
      rtx personality = get_personality_function (current_function_decl);
      if (!current_unit_personality)
        current_unit_personality = personality;

      /* We cannot keep a current personality per function as without CFI
	 asm, at the point where we emit the CFI data, there is no current
	 function anymore.  */
      if (personality && current_unit_personality != personality)
	sorry ("multiple EH personalities are supported only with assemblers "
	       "supporting %<.cfi_personality%> directive" );
    }
}
1142 | |
1143 | /* Output a marker (i.e. a label) for the end of the generated code |
1144 | for a function prologue. This gets called *after* the prologue code has |
1145 | been generated. */ |
1146 | |
1147 | void |
1148 | dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED, |
1149 | const char *file ATTRIBUTE_UNUSED) |
1150 | { |
1151 | char label[MAX_ARTIFICIAL_LABEL_BYTES]; |
1152 | |
1153 | /* Output a label to mark the endpoint of the code generated for this |
1154 | function. */ |
1155 | ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL, |
1156 | current_function_funcdef_no); |
1157 | ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL, |
1158 | current_function_funcdef_no); |
1159 | cfun->fde->dw_fde_vms_end_prologue = xstrdup (label); |
1160 | } |
1161 | |
1162 | /* Output a marker (i.e. a label) for the beginning of the generated code |
1163 | for a function epilogue. This gets called *before* the prologue code has |
1164 | been generated. */ |
1165 | |
1166 | void |
1167 | dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED, |
1168 | const char *file ATTRIBUTE_UNUSED) |
1169 | { |
1170 | dw_fde_ref fde = cfun->fde; |
1171 | char label[MAX_ARTIFICIAL_LABEL_BYTES]; |
1172 | |
1173 | if (fde->dw_fde_vms_begin_epilogue) |
1174 | return; |
1175 | |
1176 | /* Output a label to mark the endpoint of the code generated for this |
1177 | function. */ |
1178 | ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL, |
1179 | current_function_funcdef_no); |
1180 | ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL, |
1181 | current_function_funcdef_no); |
1182 | fde->dw_fde_vms_begin_epilogue = xstrdup (label); |
1183 | } |
1184 | |
1185 | /* Mark the ranges of non-debug subsections in the std text sections. */ |
1186 | |
1187 | static void |
1188 | mark_ignored_debug_section (dw_fde_ref fde, bool second) |
1189 | { |
1190 | bool std_section; |
1191 | const char *begin_label, *end_label; |
1192 | const char **last_end_label; |
1193 | vec<const char *, va_gc> **switch_ranges; |
1194 | |
1195 | if (second) |
1196 | { |
1197 | std_section = fde->second_in_std_section; |
1198 | begin_label = fde->dw_fde_second_begin; |
1199 | end_label = fde->dw_fde_second_end; |
1200 | } |
1201 | else |
1202 | { |
1203 | std_section = fde->in_std_section; |
1204 | begin_label = fde->dw_fde_begin; |
1205 | end_label = fde->dw_fde_end; |
1206 | } |
1207 | |
1208 | if (!std_section) |
1209 | return; |
1210 | |
1211 | if (in_text_section_p) |
1212 | { |
1213 | last_end_label = &last_text_label; |
1214 | switch_ranges = &switch_text_ranges; |
1215 | } |
1216 | else |
1217 | { |
1218 | last_end_label = &last_cold_label; |
1219 | switch_ranges = &switch_cold_ranges; |
1220 | } |
1221 | |
1222 | if (fde->ignored_debug) |
1223 | { |
1224 | if (*switch_ranges && !(vec_safe_length (v: *switch_ranges) & 1)) |
1225 | vec_safe_push (v&: *switch_ranges, obj: *last_end_label); |
1226 | } |
1227 | else |
1228 | { |
1229 | *last_end_label = end_label; |
1230 | |
1231 | if (!*switch_ranges) |
1232 | vec_alloc (v&: *switch_ranges, nelems: 16); |
1233 | else if (vec_safe_length (v: *switch_ranges) & 1) |
1234 | vec_safe_push (v&: *switch_ranges, obj: begin_label); |
1235 | } |
1236 | } |
1237 | |
/* Output a marker (i.e. a label) for the absolute end of the generated code
   for a function definition.  This gets called *after* the epilogue code has
   been generated.  */

void
dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
			const char *file ATTRIBUTE_UNUSED)
{
  dw_fde_ref fde;
  char label[MAX_ARTIFICIAL_LABEL_BYTES];

  /* Reset per-function caches used by the variable-location machinery.  */
  last_var_location_insn = NULL;
  cached_next_real_insn = NULL;

  /* Close the .cfi_startproc opened when the prologue began.  */
  if (dwarf2out_do_cfi_asm ())
    fprintf (stream: asm_out_file, format: "\t.cfi_endproc\n" );

  /* Output a label to mark the endpoint of the code generated for this
     function.  */
  ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
			       current_function_funcdef_no);
  ASM_OUTPUT_LABEL (asm_out_file, label);
  fde = cfun->fde;
  gcc_assert (fde != NULL);
  /* For a function split across sections, dw_fde_end was already set at
     the section switch; only record it here for the unsplit case.  */
  if (fde->dw_fde_second_begin == NULL)
    fde->dw_fde_end = xstrdup (label);

  mark_ignored_debug_section (fde, second: fde->dw_fde_second_begin != NULL);
}
1267 | |
1268 | void |
1269 | dwarf2out_frame_finish (void) |
1270 | { |
1271 | /* Output call frame information. */ |
1272 | if (targetm.debug_unwind_info () == UI_DWARF2) |
1273 | output_call_frame_info (for_eh: 0); |
1274 | |
1275 | /* Output another copy for the unwinder. */ |
1276 | if (do_eh_frame) |
1277 | output_call_frame_info (for_eh: 1); |
1278 | } |
1279 | |
1280 | static void var_location_switch_text_section (void); |
1281 | static void set_cur_line_info_table (section *); |
1282 | |
/* Handle a switch of the current function to its second text section
   (e.g. hot/cold partitioning): record the begin/end labels for both
   parts in the FDE, end and restart the CFI procedure, and redirect
   line-info emission to the new section.  */

void
dwarf2out_switch_text_section (void)
{
  char label[MAX_ARTIFICIAL_LABEL_BYTES];
  section *sect;
  dw_fde_ref fde = cfun->fde;

  /* Only one section switch per function is supported.  */
  gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);

  ASM_GENERATE_INTERNAL_LABEL (label, FUNC_SECOND_SECT_LABEL,
			       current_function_funcdef_no);

  fde->dw_fde_second_begin = ggc_strdup (label);
  /* Record the end labels of both parts; which section's end label
     belongs to which part depends on where we currently are.  */
  if (!in_cold_section_p)
    {
      fde->dw_fde_end = crtl->subsections.cold_section_end_label;
      fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
    }
  else
    {
      fde->dw_fde_end = crtl->subsections.hot_section_end_label;
      fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
    }
  have_multiple_function_sections = true;

  /* End the CFI procedure of the first part...  */
  if (dwarf2out_do_cfi_asm ())
    fprintf (stream: asm_out_file, format: "\t.cfi_endproc\n" );

  mark_ignored_debug_section (fde, second: false);

  /* Now do the real section switch.  */
  sect = current_function_section ();
  switch_to_section (sect);

  fde->second_in_std_section
    = (sect == text_section
       || (cold_text_section && sect == cold_text_section));
  in_text_section_p = sect == text_section;

  /* ...and start a fresh one for the second part.  */
  if (dwarf2out_do_cfi_asm ())
    dwarf2out_do_cfi_startproc (second: true);

  var_location_switch_text_section ();

  if (cold_text_section != NULL)
    set_cur_line_info_table (sect);
}
1330 | |
1331 | /* And now, the subset of the debugging information support code necessary |
1332 | for emitting location expressions. */ |
1333 | |
/* Describe an entry into the .debug_addr section.  */

enum ate_kind {
  ate_kind_rtx,		/* The entry's address is an rtx.  */
  ate_kind_rtx_dtprel,	/* An rtx with dtprel (thread-pointer relative)
			   semantics — inferred from the name.  */
  ate_kind_label	/* The entry's address is an assembler label.  */
};
1341 | |
/* One entry of the address table.  KIND says which member of the ADDR
   union is active (see the GTY desc below).  */
struct GTY((for_user)) addr_table_entry {
  enum ate_kind kind;		/* Which union member of ADDR is active.  */
  unsigned int refcount;	/* Number of references to this entry.  */
  unsigned int index;		/* Index assigned to this entry.  */
  union addr_table_entry_struct_union
    {
      rtx GTY ((tag ("0" ))) rtl;
      char * GTY ((tag ("1" ))) label;
    }
  GTY ((desc ("%1.kind" ))) addr;
};
1353 | |
/* Type used to number location views within a location list (see the
   vbegin/vend fields of dw_loc_list_struct).  */
typedef unsigned int var_loc_view;
1355 | |
/* Location lists are ranges + location descriptions for that range,
   so you can track variables that are in different places over
   their entire life.  */
typedef struct GTY(()) dw_loc_list_struct {
  dw_loc_list_ref dw_loc_next;	/* Next range in the list.  */
  const char *begin; /* Label and addr_entry for start of range */
  addr_table_entry *begin_entry;
  const char *end;  /* Label for end of range */
  addr_table_entry *end_entry;	/* addr_entry for end of range.  */
  char *ll_symbol; /* Label for beginning of location list.
		      Only on head of list.  */
  char *vl_symbol; /* Label for beginning of view list.  Ditto.  */
  const char *section; /* Section this loclist is relative to */
  dw_loc_descr_ref expr;	/* Location description for this range.  */
  var_loc_view vbegin, vend;	/* Views at the start and end of the range.  */
  hashval_t hash;	/* Hash value — or an index; see num_assigned.  */
  /* True if all addresses in this and subsequent lists are known to be
     resolved.  */
  bool resolved_addr;
  /* True if this list has been replaced by dw_loc_next.  */
  bool replaced;
  /* True if it has been emitted into .debug_loc* / .debug_loclists*
     section.  */
  unsigned char emitted : 1;
  /* True if hash field is index rather than hash value.  */
  unsigned char num_assigned : 1;
  /* True if .debug_loclists.dwo offset has been emitted for it already.  */
  unsigned char offset_emitted : 1;
  /* True if note_variable_value_in_expr has been called on it.  */
  unsigned char noted_variable_value : 1;
  /* True if the range should be emitted even if begin and end
     are the same.  */
  bool force;
} dw_loc_list_node;
1390 | |
1391 | static dw_loc_descr_ref int_loc_descriptor (poly_int64); |
1392 | static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT); |
1393 | |
1394 | /* Convert a DWARF stack opcode into its string name. */ |
1395 | |
1396 | static const char * |
1397 | dwarf_stack_op_name (unsigned int op) |
1398 | { |
1399 | const char *name = get_DW_OP_name (op); |
1400 | |
1401 | if (name != NULL) |
1402 | return name; |
1403 | |
1404 | return "OP_<unknown>" ; |
1405 | } |
1406 | |
/* Return TRUE iff we're to output location view lists as a separate
   attribute next to the location lists, as an extension compatible
   with DWARF 2 and above.  */

static inline bool
dwarf2out_locviews_in_attribute ()
{
  /* A value of -1 instead selects the in-loclist representation; see
     dwarf2out_locviews_in_loclist.  */
  return debug_variable_location_views == 1;
}
1416 | |
/* Return TRUE iff we're to output location view lists as part of the
   location lists, as proposed for standardization after DWARF 5.
   This representation is only possible when the dwarf headers define
   the DW_LLE_view_pair entry kind.  */

static inline bool
dwarf2out_locviews_in_loclist ()
{
#ifndef DW_LLE_view_pair
  return false;
#else
  return debug_variable_location_views == -1;
#endif
}
1429 | |
1430 | /* Return a pointer to a newly allocated location description. Location |
1431 | descriptions are simple expression terms that can be strung |
1432 | together to form more complicated location (address) descriptions. */ |
1433 | |
1434 | static inline dw_loc_descr_ref |
1435 | new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1, |
1436 | unsigned HOST_WIDE_INT oprnd2) |
1437 | { |
1438 | dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> (); |
1439 | |
1440 | descr->dw_loc_opc = op; |
1441 | descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const; |
1442 | descr->dw_loc_oprnd1.val_entry = NULL; |
1443 | descr->dw_loc_oprnd1.v.val_unsigned = oprnd1; |
1444 | descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const; |
1445 | descr->dw_loc_oprnd2.val_entry = NULL; |
1446 | descr->dw_loc_oprnd2.v.val_unsigned = oprnd2; |
1447 | |
1448 | return descr; |
1449 | } |
1450 | |
1451 | /* Add a location description term to a location description expression. */ |
1452 | |
1453 | static inline void |
1454 | add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr) |
1455 | { |
1456 | dw_loc_descr_ref *d; |
1457 | |
1458 | /* Find the end of the chain. */ |
1459 | for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next) |
1460 | ; |
1461 | |
1462 | *d = descr; |
1463 | } |
1464 | |
/* Compare two location operands for exact equality.  Operands of
   different value classes never compare equal; otherwise each class
   compares its own payload — by value, by pointer identity for shared
   nodes, or by content for strings, labels and byte blocks.  */

static bool
dw_val_equal_p (dw_val_node *a, dw_val_node *b)
{
  if (a->val_class != b->val_class)
    return false;
  switch (a->val_class)
    {
    case dw_val_class_none:
      return true;
    case dw_val_class_addr:
      return rtx_equal_p (a->v.val_addr, b->v.val_addr);

    case dw_val_class_offset:
    case dw_val_class_unsigned_const:
    case dw_val_class_const:
    case dw_val_class_unsigned_const_implicit:
    case dw_val_class_const_implicit:
    case dw_val_class_range_list:
      /* These are all HOST_WIDE_INT, signed or unsigned.  */
      return a->v.val_unsigned == b->v.val_unsigned;

    case dw_val_class_loc:
      return a->v.val_loc == b->v.val_loc;
    case dw_val_class_loc_list:
      return a->v.val_loc_list == b->v.val_loc_list;
    case dw_val_class_view_list:
      return a->v.val_view_list == b->v.val_view_list;
    case dw_val_class_die_ref:
      return a->v.val_die_ref.die == b->v.val_die_ref.die;
    case dw_val_class_fde_ref:
      return a->v.val_fde_index == b->v.val_fde_index;
    case dw_val_class_symview:
      return strcmp (s1: a->v.val_symbolic_view, s2: b->v.val_symbolic_view) == 0;
    case dw_val_class_lbl_id:
    case dw_val_class_lineptr:
    case dw_val_class_macptr:
    case dw_val_class_loclistsptr:
    case dw_val_class_high_pc:
      /* Label-valued classes compare by label text.  */
      return strcmp (s1: a->v.val_lbl_id, s2: b->v.val_lbl_id) == 0;
    case dw_val_class_str:
      return a->v.val_str == b->v.val_str;
    case dw_val_class_flag:
      return a->v.val_flag == b->v.val_flag;
    case dw_val_class_file:
    case dw_val_class_file_implicit:
      return a->v.val_file == b->v.val_file;
    case dw_val_class_decl_ref:
      return a->v.val_decl_ref == b->v.val_decl_ref;

    case dw_val_class_const_double:
      return (a->v.val_double.high == b->v.val_double.high
	      && a->v.val_double.low == b->v.val_double.low);

    case dw_val_class_wide_int:
      return *a->v.val_wide == *b->v.val_wide;

    case dw_val_class_vec:
      {
	size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
	size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;

	/* Byte blocks are equal when both sizes and bytes match.  */
	return (a_len == b_len
		&& !memcmp (s1: a->v.val_vec.array, s2: b->v.val_vec.array, n: a_len));
      }

    case dw_val_class_data8:
      return memcmp (s1: a->v.val_data8, s2: b->v.val_data8, n: 8) == 0;

    case dw_val_class_vms_delta:
      return (!strcmp (s1: a->v.val_vms_delta.lbl1, s2: b->v.val_vms_delta.lbl1)
	      && !strcmp (s1: a->v.val_vms_delta.lbl2, s2: b->v.val_vms_delta.lbl2));

    case dw_val_class_discr_value:
      return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
	      && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
    case dw_val_class_discr_list:
      /* It makes no sense comparing two discriminant value lists.  */
      return false;
    }
  gcc_unreachable ();
}
1548 | |
1549 | /* Compare two location atoms for exact equality. */ |
1550 | |
1551 | static bool |
1552 | loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b) |
1553 | { |
1554 | if (a->dw_loc_opc != b->dw_loc_opc) |
1555 | return false; |
1556 | |
1557 | /* ??? This is only ever set for DW_OP_constNu, for N equal to the |
1558 | address size, but since we always allocate cleared storage it |
1559 | should be zero for other types of locations. */ |
1560 | if (a->dtprel != b->dtprel) |
1561 | return false; |
1562 | |
1563 | return (dw_val_equal_p (a: &a->dw_loc_oprnd1, b: &b->dw_loc_oprnd1) |
1564 | && dw_val_equal_p (a: &a->dw_loc_oprnd2, b: &b->dw_loc_oprnd2)); |
1565 | } |
1566 | |
1567 | /* Compare two complete location expressions for exact equality. */ |
1568 | |
1569 | bool |
1570 | loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b) |
1571 | { |
1572 | while (1) |
1573 | { |
1574 | if (a == b) |
1575 | return true; |
1576 | if (a == NULL || b == NULL) |
1577 | return false; |
1578 | if (!loc_descr_equal_p_1 (a, b)) |
1579 | return false; |
1580 | |
1581 | a = a->dw_loc_next; |
1582 | b = b->dw_loc_next; |
1583 | } |
1584 | } |
1585 | |
1586 | |
/* Add a constant POLY_OFFSET to the (non-empty) location expression
   *LIST_HEAD.  The addition is folded into the last operation's offset
   when that is safe; otherwise additional ops are appended.  */

static void
loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
{
  dw_loc_descr_ref loc;
  HOST_WIDE_INT *p;

  gcc_assert (*list_head != NULL);

  if (known_eq (poly_offset, 0))
    return;

  /* Find the end of the chain.  */
  for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
    ;

  HOST_WIDE_INT offset;
  if (!poly_offset.is_constant (const_value: &offset))
    {
      /* A non-constant (polynomial) offset can't be folded; append a
	 full int location description plus a DW_OP_plus.  */
      loc->dw_loc_next = int_loc_descriptor (poly_offset);
      add_loc_descr (list_head: &loc->dw_loc_next, descr: new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0));
      return;
    }

  /* P points at the adjustable offset operand of the final op, if any.  */
  p = NULL;
  if (loc->dw_loc_opc == DW_OP_fbreg
      || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
    p = &loc->dw_loc_oprnd1.v.val_int;
  else if (loc->dw_loc_opc == DW_OP_bregx)
    p = &loc->dw_loc_oprnd2.v.val_int;

  /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
     offset.  Don't optimize if an signed integer overflow would happen.  */
  if (p != NULL
      && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
	  || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
    *p += offset;

  else if (offset > 0)
    loc->dw_loc_next = new_loc_descr (op: DW_OP_plus_uconst, oprnd1: offset, oprnd2: 0);

  else
    {
      /* For a negative offset, append the magnitude and subtract it.  */
      loc->dw_loc_next
	= uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
      add_loc_descr (list_head: &loc->dw_loc_next, descr: new_loc_descr (op: DW_OP_minus, oprnd1: 0, oprnd2: 0));
    }
}
1636 | |
1637 | /* Return a pointer to a newly allocated location description for |
1638 | REG and OFFSET. */ |
1639 | |
1640 | static inline dw_loc_descr_ref |
1641 | new_reg_loc_descr (unsigned int reg, poly_int64 offset) |
1642 | { |
1643 | HOST_WIDE_INT const_offset; |
1644 | if (offset.is_constant (const_value: &const_offset)) |
1645 | { |
1646 | if (reg <= 31) |
1647 | return new_loc_descr (op: (enum dwarf_location_atom) (DW_OP_breg0 + reg), |
1648 | oprnd1: const_offset, oprnd2: 0); |
1649 | else |
1650 | return new_loc_descr (op: DW_OP_bregx, oprnd1: reg, oprnd2: const_offset); |
1651 | } |
1652 | else |
1653 | { |
1654 | dw_loc_descr_ref ret = new_reg_loc_descr (reg, offset: 0); |
1655 | loc_descr_plus_const (list_head: &ret, poly_offset: offset); |
1656 | return ret; |
1657 | } |
1658 | } |
1659 | |
1660 | /* Add a constant OFFSET to a location list. */ |
1661 | |
1662 | static void |
1663 | loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset) |
1664 | { |
1665 | dw_loc_list_ref d; |
1666 | for (d = list_head; d != NULL; d = d->dw_loc_next) |
1667 | loc_descr_plus_const (list_head: &d->expr, poly_offset: offset); |
1668 | } |
1669 | |
/* Size in bytes of a reference to another DIE: the address size for
   DWARF 2, the section offset size for DWARF 3 and later.  */
#define DWARF_REF_SIZE	\
  (dwarf_version == 2 ? DWARF2_ADDR_SIZE : dwarf_offset_size)

/* The number of bits that can be encoded by largest DW_FORM_dataN.
   In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
   DW_FORM_data16 with 128 bits.  */
#define DWARF_LARGEST_DATA_FORM_BITS \
  (dwarf_version >= 5 ? 128 : 64)
1678 | |
1679 | /* Utility inline function for construction of ops that were GNU extension |
1680 | before DWARF 5. */ |
1681 | static inline enum dwarf_location_atom |
1682 | dwarf_OP (enum dwarf_location_atom op) |
1683 | { |
1684 | switch (op) |
1685 | { |
1686 | case DW_OP_implicit_pointer: |
1687 | if (dwarf_version < 5) |
1688 | return DW_OP_GNU_implicit_pointer; |
1689 | break; |
1690 | |
1691 | case DW_OP_entry_value: |
1692 | if (dwarf_version < 5) |
1693 | return DW_OP_GNU_entry_value; |
1694 | break; |
1695 | |
1696 | case DW_OP_const_type: |
1697 | if (dwarf_version < 5) |
1698 | return DW_OP_GNU_const_type; |
1699 | break; |
1700 | |
1701 | case DW_OP_regval_type: |
1702 | if (dwarf_version < 5) |
1703 | return DW_OP_GNU_regval_type; |
1704 | break; |
1705 | |
1706 | case DW_OP_deref_type: |
1707 | if (dwarf_version < 5) |
1708 | return DW_OP_GNU_deref_type; |
1709 | break; |
1710 | |
1711 | case DW_OP_convert: |
1712 | if (dwarf_version < 5) |
1713 | return DW_OP_GNU_convert; |
1714 | break; |
1715 | |
1716 | case DW_OP_reinterpret: |
1717 | if (dwarf_version < 5) |
1718 | return DW_OP_GNU_reinterpret; |
1719 | break; |
1720 | |
1721 | case DW_OP_addrx: |
1722 | if (dwarf_version < 5) |
1723 | return DW_OP_GNU_addr_index; |
1724 | break; |
1725 | |
1726 | case DW_OP_constx: |
1727 | if (dwarf_version < 5) |
1728 | return DW_OP_GNU_const_index; |
1729 | break; |
1730 | |
1731 | default: |
1732 | break; |
1733 | } |
1734 | return op; |
1735 | } |
1736 | |
1737 | /* Similarly for attributes. */ |
1738 | static inline enum dwarf_attribute |
1739 | dwarf_AT (enum dwarf_attribute at) |
1740 | { |
1741 | switch (at) |
1742 | { |
1743 | case DW_AT_call_return_pc: |
1744 | if (dwarf_version < 5) |
1745 | return DW_AT_low_pc; |
1746 | break; |
1747 | |
1748 | case DW_AT_call_tail_call: |
1749 | if (dwarf_version < 5) |
1750 | return DW_AT_GNU_tail_call; |
1751 | break; |
1752 | |
1753 | case DW_AT_call_origin: |
1754 | if (dwarf_version < 5) |
1755 | return DW_AT_abstract_origin; |
1756 | break; |
1757 | |
1758 | case DW_AT_call_target: |
1759 | if (dwarf_version < 5) |
1760 | return DW_AT_GNU_call_site_target; |
1761 | break; |
1762 | |
1763 | case DW_AT_call_target_clobbered: |
1764 | if (dwarf_version < 5) |
1765 | return DW_AT_GNU_call_site_target_clobbered; |
1766 | break; |
1767 | |
1768 | case DW_AT_call_parameter: |
1769 | if (dwarf_version < 5) |
1770 | return DW_AT_abstract_origin; |
1771 | break; |
1772 | |
1773 | case DW_AT_call_value: |
1774 | if (dwarf_version < 5) |
1775 | return DW_AT_GNU_call_site_value; |
1776 | break; |
1777 | |
1778 | case DW_AT_call_data_value: |
1779 | if (dwarf_version < 5) |
1780 | return DW_AT_GNU_call_site_data_value; |
1781 | break; |
1782 | |
1783 | case DW_AT_call_all_calls: |
1784 | if (dwarf_version < 5) |
1785 | return DW_AT_GNU_all_call_sites; |
1786 | break; |
1787 | |
1788 | case DW_AT_call_all_tail_calls: |
1789 | if (dwarf_version < 5) |
1790 | return DW_AT_GNU_all_tail_call_sites; |
1791 | break; |
1792 | |
1793 | case DW_AT_dwo_name: |
1794 | if (dwarf_version < 5) |
1795 | return DW_AT_GNU_dwo_name; |
1796 | break; |
1797 | |
1798 | case DW_AT_addr_base: |
1799 | if (dwarf_version < 5) |
1800 | return DW_AT_GNU_addr_base; |
1801 | break; |
1802 | |
1803 | default: |
1804 | break; |
1805 | } |
1806 | return at; |
1807 | } |
1808 | |
1809 | /* And similarly for tags. */ |
1810 | static inline enum dwarf_tag |
1811 | dwarf_TAG (enum dwarf_tag tag) |
1812 | { |
1813 | switch (tag) |
1814 | { |
1815 | case DW_TAG_call_site: |
1816 | if (dwarf_version < 5) |
1817 | return DW_TAG_GNU_call_site; |
1818 | break; |
1819 | |
1820 | case DW_TAG_call_site_parameter: |
1821 | if (dwarf_version < 5) |
1822 | return DW_TAG_GNU_call_site_parameter; |
1823 | break; |
1824 | |
1825 | default: |
1826 | break; |
1827 | } |
1828 | return tag; |
1829 | } |
1830 | |
1831 | /* And similarly for forms. */ |
1832 | static inline enum dwarf_form |
1833 | dwarf_FORM (enum dwarf_form form) |
1834 | { |
1835 | switch (form) |
1836 | { |
1837 | case DW_FORM_addrx: |
1838 | if (dwarf_version < 5) |
1839 | return DW_FORM_GNU_addr_index; |
1840 | break; |
1841 | |
1842 | case DW_FORM_strx: |
1843 | if (dwarf_version < 5) |
1844 | return DW_FORM_GNU_str_index; |
1845 | break; |
1846 | |
1847 | default: |
1848 | break; |
1849 | } |
1850 | return form; |
1851 | } |
1852 | |
1853 | static unsigned long int get_base_type_offset (dw_die_ref); |
1854 | |
1855 | /* Return the size of a location descriptor. */ |
1856 | |
static unsigned long
size_of_loc_descr (dw_loc_descr_ref loc)
{
  /* Every DWARF expression operation occupies at least the 1-byte
     opcode; operands (if any) add to that below.  */
  unsigned long size = 1;

  switch (loc->dw_loc_opc)
    {
    case DW_OP_addr:
      /* Operand is a full target address.  */
      size += DWARF2_ADDR_SIZE;
      break;
    case DW_OP_GNU_addr_index:
    case DW_OP_addrx:
    case DW_OP_GNU_const_index:
    case DW_OP_constx:
      /* Operand is a ULEB128 index into the address table; the index
	 must have been assigned before sizes are computed.  */
      gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
      size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
      break;
    /* Fixed-size integer constants.  */
    case DW_OP_const1u:
    case DW_OP_const1s:
      size += 1;
      break;
    case DW_OP_const2u:
    case DW_OP_const2s:
      size += 2;
      break;
    case DW_OP_const4u:
    case DW_OP_const4s:
      size += 4;
      break;
    case DW_OP_const8u:
    case DW_OP_const8s:
      size += 8;
      break;
    /* Variable-length LEB128-encoded constants.  */
    case DW_OP_constu:
      size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
      break;
    case DW_OP_consts:
      size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
      break;
    case DW_OP_pick:
      size += 1;
      break;
    case DW_OP_plus_uconst:
      size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
      break;
    case DW_OP_skip:
    case DW_OP_bra:
      /* Operand is a fixed 2-byte branch offset.  */
      size += 2;
      break;
    /* DW_OP_breg<n> encodes the register in the opcode; the operand is
       just the SLEB128 offset.  */
    case DW_OP_breg0:
    case DW_OP_breg1:
    case DW_OP_breg2:
    case DW_OP_breg3:
    case DW_OP_breg4:
    case DW_OP_breg5:
    case DW_OP_breg6:
    case DW_OP_breg7:
    case DW_OP_breg8:
    case DW_OP_breg9:
    case DW_OP_breg10:
    case DW_OP_breg11:
    case DW_OP_breg12:
    case DW_OP_breg13:
    case DW_OP_breg14:
    case DW_OP_breg15:
    case DW_OP_breg16:
    case DW_OP_breg17:
    case DW_OP_breg18:
    case DW_OP_breg19:
    case DW_OP_breg20:
    case DW_OP_breg21:
    case DW_OP_breg22:
    case DW_OP_breg23:
    case DW_OP_breg24:
    case DW_OP_breg25:
    case DW_OP_breg26:
    case DW_OP_breg27:
    case DW_OP_breg28:
    case DW_OP_breg29:
    case DW_OP_breg30:
    case DW_OP_breg31:
      size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
      break;
    case DW_OP_regx:
      size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
      break;
    case DW_OP_fbreg:
      size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
      break;
    case DW_OP_bregx:
      /* ULEB128 register number followed by SLEB128 offset.  */
      size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
      size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
      break;
    case DW_OP_piece:
      size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
      break;
    case DW_OP_bit_piece:
      /* ULEB128 piece size in bits, then ULEB128 bit offset.  */
      size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
      size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
      break;
    case DW_OP_deref_size:
    case DW_OP_xderef_size:
      size += 1;
      break;
    case DW_OP_call2:
      size += 2;
      break;
    case DW_OP_call4:
      size += 4;
      break;
    case DW_OP_call_ref:
    case DW_OP_GNU_variable_value:
      /* Operand is a DIE reference of the section-offset size.  */
      size += DWARF_REF_SIZE;
      break;
    case DW_OP_implicit_value:
      /* ULEB128 length followed by that many bytes of constant data.  */
      size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
	      + loc->dw_loc_oprnd1.v.val_unsigned;
      break;
    case DW_OP_implicit_pointer:
    case DW_OP_GNU_implicit_pointer:
      /* DIE reference followed by SLEB128 byte offset.  */
      size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
      break;
    case DW_OP_entry_value:
    case DW_OP_GNU_entry_value:
      {
	/* A nested DWARF expression, preceded by its ULEB128 length.  */
	unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
	size += size_of_uleb128 (op_size) + op_size;
	break;
      }
    case DW_OP_const_type:
    case DW_OP_GNU_const_type:
      {
	/* ULEB128 base-type DIE offset, a 1-byte size, and then the raw
	   constant bytes, whose count depends on the value class.  */
	unsigned long o
	  = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
	size += size_of_uleb128 (o) + 1;
	switch (loc->dw_loc_oprnd2.val_class)
	  {
	  case dw_val_class_vec:
	    size += loc->dw_loc_oprnd2.v.val_vec.length
		    * loc->dw_loc_oprnd2.v.val_vec.elt_size;
	    break;
	  case dw_val_class_const:
	    size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
	    break;
	  case dw_val_class_const_double:
	    size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
	    break;
	  case dw_val_class_wide_int:
	    size += (get_full_len (op: *loc->dw_loc_oprnd2.v.val_wide)
		     * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
	    break;
	  default:
	    gcc_unreachable ();
	  }
	break;
      }
    case DW_OP_regval_type:
    case DW_OP_GNU_regval_type:
      {
	/* ULEB128 register number, then ULEB128 base-type DIE offset.  */
	unsigned long o
	  = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
	size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
		+ size_of_uleb128 (o);
      }
      break;
    case DW_OP_deref_type:
    case DW_OP_GNU_deref_type:
      {
	/* 1-byte size, then ULEB128 base-type DIE offset.  */
	unsigned long o
	  = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
	size += 1 + size_of_uleb128 (o);
      }
      break;
    case DW_OP_convert:
    case DW_OP_reinterpret:
    case DW_OP_GNU_convert:
    case DW_OP_GNU_reinterpret:
      /* Operand is either a plain ULEB128 constant or a ULEB128
	 base-type DIE offset, depending on the operand's class.  */
      if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
	size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
      else
	{
	  unsigned long o
	    = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
	  size += size_of_uleb128 (o);
	}
      break;
    case DW_OP_GNU_parameter_ref:
      /* Operand is a fixed 4-byte DIE reference.  */
      size += 4;
      break;
    default:
      /* Other opcodes take no operands.  */
      break;
    }

  return size;
}
2052 | |
2053 | /* Return the size of a series of location descriptors. */ |
2054 | |
2055 | unsigned long |
2056 | size_of_locs (dw_loc_descr_ref loc) |
2057 | { |
2058 | dw_loc_descr_ref l; |
2059 | unsigned long size; |
2060 | |
2061 | /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr |
2062 | field, to avoid writing to a PCH file. */ |
2063 | for (size = 0, l = loc; l != NULL; l = l->dw_loc_next) |
2064 | { |
2065 | if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra) |
2066 | break; |
2067 | size += size_of_loc_descr (loc: l); |
2068 | } |
2069 | if (! l) |
2070 | return size; |
2071 | |
2072 | for (size = 0, l = loc; l != NULL; l = l->dw_loc_next) |
2073 | { |
2074 | l->dw_loc_addr = size; |
2075 | size += size_of_loc_descr (loc: l); |
2076 | } |
2077 | |
2078 | return size; |
2079 | } |
2080 | |
2081 | /* Return the size of the value in a DW_AT_discr_value attribute. */ |
2082 | |
2083 | static int |
2084 | size_of_discr_value (dw_discr_value *discr_value) |
2085 | { |
2086 | if (discr_value->pos) |
2087 | return size_of_uleb128 (discr_value->v.uval); |
2088 | else |
2089 | return size_of_sleb128 (discr_value->v.sval); |
2090 | } |
2091 | |
2092 | /* Return the size of the value in a DW_AT_discr_list attribute. */ |
2093 | |
2094 | static int |
2095 | size_of_discr_list (dw_discr_list_ref discr_list) |
2096 | { |
2097 | int size = 0; |
2098 | |
2099 | for (dw_discr_list_ref list = discr_list; |
2100 | list != NULL; |
2101 | list = list->dw_discr_next) |
2102 | { |
2103 | /* One byte for the discriminant value descriptor, and then one or two |
2104 | LEB128 numbers, depending on whether it's a single case label or a |
2105 | range label. */ |
2106 | size += 1; |
2107 | size += size_of_discr_value (discr_value: &list->dw_discr_lower_bound); |
2108 | if (list->dw_discr_range != 0) |
2109 | size += size_of_discr_value (discr_value: &list->dw_discr_upper_bound); |
2110 | } |
2111 | return size; |
2112 | } |
2113 | |
2114 | static HOST_WIDE_INT extract_int (const unsigned char *, unsigned); |
2115 | static void get_ref_die_offset_label (char *, dw_die_ref); |
2116 | static unsigned long int get_ref_die_offset (dw_die_ref); |
2117 | |
2118 | /* Output location description stack opcode's operands (if any). |
2119 | The for_eh_or_skip parameter controls whether register numbers are |
2120 | converted using DWARF2_FRAME_REG_OUT, which is needed in the case that |
2121 | hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind |
2122 | info). This should be suppressed for the cases that have not been converted |
2123 | (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */ |
2124 | |
static void
output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
{
  dw_val_ref val1 = &loc->dw_loc_oprnd1;
  dw_val_ref val2 = &loc->dw_loc_oprnd2;

  switch (loc->dw_loc_opc)
    {
#ifdef DWARF2_DEBUGGING_INFO
    /* Multi-byte constants are only emitted for full debug info; for
       unwind-only output they trap in the #else branch below.  */
    case DW_OP_const2u:
    case DW_OP_const2s:
      dw2_asm_output_data (2, val1->v.val_int, NULL);
      break;
    case DW_OP_const4u:
      if (loc->dtprel)
	{
	  /* DTP-relative (thread-local) addresses need a target hook to
	     emit the proper relocation.  */
	  gcc_assert (targetm.asm_out.output_dwarf_dtprel);
	  targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
					       val1->v.val_addr);
	  fputc (c: '\n', stream: asm_out_file);
	  break;
	}
      /* FALLTHRU */
    case DW_OP_const4s:
      dw2_asm_output_data (4, val1->v.val_int, NULL);
      break;
    case DW_OP_const8u:
      if (loc->dtprel)
	{
	  gcc_assert (targetm.asm_out.output_dwarf_dtprel);
	  targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
					       val1->v.val_addr);
	  fputc (c: '\n', stream: asm_out_file);
	  break;
	}
      /* FALLTHRU */
    case DW_OP_const8s:
      gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
      dw2_asm_output_data (8, val1->v.val_int, NULL);
      break;
    case DW_OP_skip:
    case DW_OP_bra:
      {
	int offset;

	gcc_assert (val1->val_class == dw_val_class_loc);
	/* The branch offset is relative to the end of this operation:
	   1 opcode byte + 2 operand bytes, hence the +3.  */
	offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);

	dw2_asm_output_data (2, offset, NULL);
      }
      break;
    case DW_OP_implicit_value:
      /* ULEB128 length followed by the constant bytes, whose layout
	 depends on the operand's value class.  */
      dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
      switch (val2->val_class)
	{
	case dw_val_class_const:
	  dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
	  break;
	case dw_val_class_vec:
	  {
	    unsigned int elt_size = val2->v.val_vec.elt_size;
	    unsigned int len = val2->v.val_vec.length;
	    unsigned int i;
	    unsigned char *p;

	    /* Elements wider than a host word are emitted as pairs of
	       half-size words.  */
	    if (elt_size > sizeof (HOST_WIDE_INT))
	      {
		elt_size /= 2;
		len *= 2;
	      }
	    for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
		 i < len;
		 i++, p += elt_size)
	      dw2_asm_output_data (elt_size, extract_int (p, elt_size),
				   "fp or vector constant word %u" , i);
	  }
	  break;
	case dw_val_class_const_double:
	  {
	    unsigned HOST_WIDE_INT first, second;

	    /* Emit the two halves in target word order.  */
	    if (WORDS_BIG_ENDIAN)
	      {
		first = val2->v.val_double.high;
		second = val2->v.val_double.low;
	      }
	    else
	      {
		first = val2->v.val_double.low;
		second = val2->v.val_double.high;
	      }
	    dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
				 first, NULL);
	    dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
				 second, NULL);
	  }
	  break;
	case dw_val_class_wide_int:
	  {
	    int i;
	    int len = get_full_len (op: *val2->v.val_wide);
	    /* Emit the host words of the wide int in target word
	       order.  */
	    if (WORDS_BIG_ENDIAN)
	      for (i = len - 1; i >= 0; --i)
		dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
				     val2->v.val_wide->elt (i), NULL);
	    else
	      for (i = 0; i < len; ++i)
		dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
				     val2->v.val_wide->elt (i), NULL);
	  }
	  break;
	case dw_val_class_addr:
	  gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
	  dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
	  break;
	default:
	  gcc_unreachable ();
	}
      break;
#else
    case DW_OP_const2u:
    case DW_OP_const2s:
    case DW_OP_const4u:
    case DW_OP_const4s:
    case DW_OP_const8u:
    case DW_OP_const8s:
    case DW_OP_skip:
    case DW_OP_bra:
    case DW_OP_implicit_value:
      /* We currently don't make any attempt to make sure these are
	 aligned properly like we do for the main unwind info, so
	 don't support emitting things larger than a byte if we're
	 only doing unwinding.  */
      gcc_unreachable ();
#endif
    case DW_OP_const1u:
    case DW_OP_const1s:
      dw2_asm_output_data (1, val1->v.val_int, NULL);
      break;
    case DW_OP_constu:
      dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
      break;
    case DW_OP_consts:
      dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
      break;
    case DW_OP_pick:
      dw2_asm_output_data (1, val1->v.val_int, NULL);
      break;
    case DW_OP_plus_uconst:
      dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
      break;
    /* DW_OP_breg<n>: the register is in the opcode (remapped by the
       caller); the operand here is just the SLEB128 offset.  */
    case DW_OP_breg0:
    case DW_OP_breg1:
    case DW_OP_breg2:
    case DW_OP_breg3:
    case DW_OP_breg4:
    case DW_OP_breg5:
    case DW_OP_breg6:
    case DW_OP_breg7:
    case DW_OP_breg8:
    case DW_OP_breg9:
    case DW_OP_breg10:
    case DW_OP_breg11:
    case DW_OP_breg12:
    case DW_OP_breg13:
    case DW_OP_breg14:
    case DW_OP_breg15:
    case DW_OP_breg16:
    case DW_OP_breg17:
    case DW_OP_breg18:
    case DW_OP_breg19:
    case DW_OP_breg20:
    case DW_OP_breg21:
    case DW_OP_breg22:
    case DW_OP_breg23:
    case DW_OP_breg24:
    case DW_OP_breg25:
    case DW_OP_breg26:
    case DW_OP_breg27:
    case DW_OP_breg28:
    case DW_OP_breg29:
    case DW_OP_breg30:
    case DW_OP_breg31:
      dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
      break;
    case DW_OP_regx:
      {
	unsigned r = val1->v.val_unsigned;
	/* Remap the register for unwind output; the remapping must not
	   change the encoded LEB128 size computed earlier.  */
	if (for_eh_or_skip >= 0)
	  r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
	gcc_assert (size_of_uleb128 (r)
		    == size_of_uleb128 (val1->v.val_unsigned));
	dw2_asm_output_data_uleb128 (r, NULL);
      }
      break;
    case DW_OP_fbreg:
      dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
      break;
    case DW_OP_bregx:
      {
	unsigned r = val1->v.val_unsigned;
	if (for_eh_or_skip >= 0)
	  r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
	gcc_assert (size_of_uleb128 (r)
		    == size_of_uleb128 (val1->v.val_unsigned));
	dw2_asm_output_data_uleb128 (r, NULL);
	dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
      }
      break;
    case DW_OP_piece:
      dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
      break;
    case DW_OP_bit_piece:
      dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
      dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
      break;
    case DW_OP_deref_size:
    case DW_OP_xderef_size:
      dw2_asm_output_data (1, val1->v.val_int, NULL);
      break;

    case DW_OP_addr:
      if (loc->dtprel)
	{
	  /* DTP-relative (thread-local) address: requires target
	     support for the relocation.  */
	  if (targetm.asm_out.output_dwarf_dtprel)
	    {
	      targetm.asm_out.output_dwarf_dtprel (asm_out_file,
						   DWARF2_ADDR_SIZE,
						   val1->v.val_addr);
	      fputc (c: '\n', stream: asm_out_file);
	    }
	  else
	    gcc_unreachable ();
	}
      else
	{
#ifdef DWARF2_DEBUGGING_INFO
	  dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
#else
	  gcc_unreachable ();
#endif
	}
      break;

    case DW_OP_GNU_addr_index:
    case DW_OP_addrx:
    case DW_OP_GNU_const_index:
    case DW_OP_constx:
      /* The operand is a previously-assigned index into .debug_addr.  */
      gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
      dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
				   "(index into .debug_addr)" );
      break;

    case DW_OP_call2:
    case DW_OP_call4:
      {
	unsigned long die_offset
	  = get_ref_die_offset (val1->v.val_die_ref.die);
	/* Make sure the offset has been computed and that we can encode it as
	   an operand.  */
	gcc_assert (die_offset > 0
		    && die_offset <= (loc->dw_loc_opc == DW_OP_call2
				     ? 0xffff
				     : 0xffffffff));
	dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
			     die_offset, NULL);
      }
      break;

    case DW_OP_call_ref:
    case DW_OP_GNU_variable_value:
      {
	/* Emitted as a section-relative label reference since the DIE
	   offset may not be known yet.  */
	char label[MAX_ARTIFICIAL_LABEL_BYTES
		   + HOST_BITS_PER_WIDE_INT / 2 + 2];
	gcc_assert (val1->val_class == dw_val_class_die_ref);
	get_ref_die_offset_label (label, val1->v.val_die_ref.die);
	dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
      }
      break;

    case DW_OP_implicit_pointer:
    case DW_OP_GNU_implicit_pointer:
      {
	/* DIE reference (as a label) followed by SLEB128 byte offset.  */
	char label[MAX_ARTIFICIAL_LABEL_BYTES
		   + HOST_BITS_PER_WIDE_INT / 2 + 2];
	gcc_assert (val1->val_class == dw_val_class_die_ref);
	get_ref_die_offset_label (label, val1->v.val_die_ref.die);
	dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
	dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
      }
      break;

    case DW_OP_entry_value:
    case DW_OP_GNU_entry_value:
      /* ULEB128 length of the nested expression, then the expression
	 itself (recursively).  */
      dw2_asm_output_data_uleb128 (size_of_locs (loc: val1->v.val_loc), NULL);
      output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
      break;

    case DW_OP_const_type:
    case DW_OP_GNU_const_type:
      {
	/* ULEB128 base-type DIE offset, 1-byte size, then the constant
	   bytes, whose layout depends on the value class.  */
	unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
	gcc_assert (o);
	dw2_asm_output_data_uleb128 (o, NULL);
	switch (val2->val_class)
	  {
	  case dw_val_class_const:
	    l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
	    dw2_asm_output_data (1, l, NULL);
	    dw2_asm_output_data (l, val2->v.val_int, NULL);
	    break;
	  case dw_val_class_vec:
	    {
	      unsigned int elt_size = val2->v.val_vec.elt_size;
	      unsigned int len = val2->v.val_vec.length;
	      unsigned int i;
	      unsigned char *p;

	      l = len * elt_size;
	      dw2_asm_output_data (1, l, NULL);
	      /* Elements wider than a host word are emitted as pairs of
		 half-size words.  */
	      if (elt_size > sizeof (HOST_WIDE_INT))
		{
		  elt_size /= 2;
		  len *= 2;
		}
	      for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
		   i < len;
		   i++, p += elt_size)
		dw2_asm_output_data (elt_size, extract_int (p, elt_size),
				     "fp or vector constant word %u" , i);
	    }
	    break;
	  case dw_val_class_const_double:
	    {
	      unsigned HOST_WIDE_INT first, second;
	      l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;

	      dw2_asm_output_data (1, 2 * l, NULL);
	      /* Emit the two halves in target word order.  */
	      if (WORDS_BIG_ENDIAN)
		{
		  first = val2->v.val_double.high;
		  second = val2->v.val_double.low;
		}
	      else
		{
		  first = val2->v.val_double.low;
		  second = val2->v.val_double.high;
		}
	      dw2_asm_output_data (l, first, NULL);
	      dw2_asm_output_data (l, second, NULL);
	    }
	    break;
	  case dw_val_class_wide_int:
	    {
	      int i;
	      int len = get_full_len (op: *val2->v.val_wide);
	      l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;

	      dw2_asm_output_data (1, len * l, NULL);
	      /* Emit the host words in target word order.  */
	      if (WORDS_BIG_ENDIAN)
		for (i = len - 1; i >= 0; --i)
		  dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
	      else
		for (i = 0; i < len; ++i)
		  dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
	    }
	    break;
	  default:
	    gcc_unreachable ();
	  }
      }
      break;
    case DW_OP_regval_type:
    case DW_OP_GNU_regval_type:
      {
	unsigned r = val1->v.val_unsigned;
	unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
	gcc_assert (o);
	/* Remap the register for unwind output; the remapping must not
	   change the encoded LEB128 size.  */
	if (for_eh_or_skip >= 0)
	  {
	    r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
	    gcc_assert (size_of_uleb128 (r)
			== size_of_uleb128 (val1->v.val_unsigned));
	  }
	dw2_asm_output_data_uleb128 (r, NULL);
	dw2_asm_output_data_uleb128 (o, NULL);
      }
      break;
    case DW_OP_deref_type:
    case DW_OP_GNU_deref_type:
      {
	/* 1-byte size, then ULEB128 base-type DIE offset.  */
	unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
	gcc_assert (o);
	dw2_asm_output_data (1, val1->v.val_int, NULL);
	dw2_asm_output_data_uleb128 (o, NULL);
      }
      break;
    case DW_OP_convert:
    case DW_OP_reinterpret:
    case DW_OP_GNU_convert:
    case DW_OP_GNU_reinterpret:
      /* Operand is either a plain ULEB128 constant or a base-type DIE
	 offset, depending on the operand's value class.  */
      if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
	dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
      else
	{
	  unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
	  gcc_assert (o);
	  dw2_asm_output_data_uleb128 (o, NULL);
	}
      break;

    case DW_OP_GNU_parameter_ref:
      {
	/* Fixed 4-byte DIE offset of the referenced parameter.  */
	unsigned long o;
	gcc_assert (val1->val_class == dw_val_class_die_ref);
	o = get_ref_die_offset (val1->v.val_die_ref.die);
	dw2_asm_output_data (4, o, NULL);
      }
      break;

    default:
      /* Other codes have no operands.  */
      break;
    }
}
2550 | |
2551 | /* Output a sequence of location operations. |
2552 | The for_eh_or_skip parameter controls whether register numbers are |
2553 | converted using DWARF2_FRAME_REG_OUT, which is needed in the case that |
2554 | hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind |
2555 | info). This should be suppressed for the cases that have not been converted |
2556 | (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */ |
2557 | |
2558 | void |
2559 | output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip) |
2560 | { |
2561 | for (; loc != NULL; loc = loc->dw_loc_next) |
2562 | { |
2563 | enum dwarf_location_atom opc = loc->dw_loc_opc; |
2564 | /* Output the opcode. */ |
2565 | if (for_eh_or_skip >= 0 |
2566 | && opc >= DW_OP_breg0 && opc <= DW_OP_breg31) |
2567 | { |
2568 | unsigned r = (opc - DW_OP_breg0); |
2569 | r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip); |
2570 | gcc_assert (r <= 31); |
2571 | opc = (enum dwarf_location_atom) (DW_OP_breg0 + r); |
2572 | } |
2573 | else if (for_eh_or_skip >= 0 |
2574 | && opc >= DW_OP_reg0 && opc <= DW_OP_reg31) |
2575 | { |
2576 | unsigned r = (opc - DW_OP_reg0); |
2577 | r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip); |
2578 | gcc_assert (r <= 31); |
2579 | opc = (enum dwarf_location_atom) (DW_OP_reg0 + r); |
2580 | } |
2581 | |
2582 | dw2_asm_output_data (1, opc, |
2583 | "%s" , dwarf_stack_op_name (op: opc)); |
2584 | |
2585 | /* Output the operand(s) (if any). */ |
2586 | output_loc_operands (loc, for_eh_or_skip); |
2587 | } |
2588 | } |
2589 | |
2590 | /* Output location description stack opcode's operands (if any). |
2591 | The output is single bytes on a line, suitable for .cfi_escape. */ |
2592 | |
static void
output_loc_operands_raw (dw_loc_descr_ref loc)
{
  dw_val_ref val1 = &loc->dw_loc_oprnd1;
  dw_val_ref val2 = &loc->dw_loc_oprnd2;

  switch (loc->dw_loc_opc)
    {
    case DW_OP_addr:
    case DW_OP_GNU_addr_index:
    case DW_OP_addrx:
    case DW_OP_GNU_const_index:
    case DW_OP_constx:
    case DW_OP_implicit_value:
      /* We cannot output addresses in .cfi_escape, only bytes. */
      gcc_unreachable ();

    /* Fixed-size constant operands, emitted as raw bytes.  */
    case DW_OP_const1u:
    case DW_OP_const1s:
    case DW_OP_pick:
    case DW_OP_deref_size:
    case DW_OP_xderef_size:
      fputc (c: ',', stream: asm_out_file);
      dw2_asm_output_data_raw (1, val1->v.val_int);
      break;

    case DW_OP_const2u:
    case DW_OP_const2s:
      fputc (c: ',', stream: asm_out_file);
      dw2_asm_output_data_raw (2, val1->v.val_int);
      break;

    case DW_OP_const4u:
    case DW_OP_const4s:
      fputc (c: ',', stream: asm_out_file);
      dw2_asm_output_data_raw (4, val1->v.val_int);
      break;

    case DW_OP_const8u:
    case DW_OP_const8s:
      gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
      fputc (c: ',', stream: asm_out_file);
      dw2_asm_output_data_raw (8, val1->v.val_int);
      break;

    case DW_OP_skip:
    case DW_OP_bra:
      {
	int offset;

	gcc_assert (val1->val_class == dw_val_class_loc);
	/* The branch offset is relative to the end of this operation:
	   1 opcode byte + 2 operand bytes, hence the +3.  */
	offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);

	fputc (c: ',', stream: asm_out_file);
	dw2_asm_output_data_raw (2, offset);
      }
      break;

    case DW_OP_regx:
      {
	/* Raw output is always for unwind info, so unconditionally
	   remap the register; the remapping must not change the
	   encoded LEB128 size.  */
	unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
	gcc_assert (size_of_uleb128 (r)
		    == size_of_uleb128 (val1->v.val_unsigned));
	fputc (c: ',', stream: asm_out_file);
	dw2_asm_output_data_uleb128_raw (r);
      }
      break;

    case DW_OP_constu:
    case DW_OP_plus_uconst:
    case DW_OP_piece:
      fputc (c: ',', stream: asm_out_file);
      dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
      break;

    case DW_OP_bit_piece:
      /* ULEB128 piece size in bits, then ULEB128 bit offset.  */
      fputc (c: ',', stream: asm_out_file);
      dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
      dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
      break;

    /* DW_OP_breg<n>: the register is in the opcode (remapped by the
       caller); the operand here is just the SLEB128 offset.  */
    case DW_OP_consts:
    case DW_OP_breg0:
    case DW_OP_breg1:
    case DW_OP_breg2:
    case DW_OP_breg3:
    case DW_OP_breg4:
    case DW_OP_breg5:
    case DW_OP_breg6:
    case DW_OP_breg7:
    case DW_OP_breg8:
    case DW_OP_breg9:
    case DW_OP_breg10:
    case DW_OP_breg11:
    case DW_OP_breg12:
    case DW_OP_breg13:
    case DW_OP_breg14:
    case DW_OP_breg15:
    case DW_OP_breg16:
    case DW_OP_breg17:
    case DW_OP_breg18:
    case DW_OP_breg19:
    case DW_OP_breg20:
    case DW_OP_breg21:
    case DW_OP_breg22:
    case DW_OP_breg23:
    case DW_OP_breg24:
    case DW_OP_breg25:
    case DW_OP_breg26:
    case DW_OP_breg27:
    case DW_OP_breg28:
    case DW_OP_breg29:
    case DW_OP_breg30:
    case DW_OP_breg31:
    case DW_OP_fbreg:
      fputc (c: ',', stream: asm_out_file);
      dw2_asm_output_data_sleb128_raw (val1->v.val_int);
      break;

    case DW_OP_bregx:
      {
	/* ULEB128 remapped register number, then SLEB128 offset.  */
	unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
	gcc_assert (size_of_uleb128 (r)
		    == size_of_uleb128 (val1->v.val_unsigned));
	fputc (c: ',', stream: asm_out_file);
	dw2_asm_output_data_uleb128_raw (r);
	fputc (c: ',', stream: asm_out_file);
	dw2_asm_output_data_sleb128_raw (val2->v.val_int);
      }
      break;

    /* These opcodes reference DIEs or nested expressions, which cannot
       appear in raw .cfi_escape byte output.  */
    case DW_OP_implicit_pointer:
    case DW_OP_entry_value:
    case DW_OP_const_type:
    case DW_OP_regval_type:
    case DW_OP_deref_type:
    case DW_OP_convert:
    case DW_OP_reinterpret:
    case DW_OP_GNU_implicit_pointer:
    case DW_OP_GNU_entry_value:
    case DW_OP_GNU_const_type:
    case DW_OP_GNU_regval_type:
    case DW_OP_GNU_deref_type:
    case DW_OP_GNU_convert:
    case DW_OP_GNU_reinterpret:
    case DW_OP_GNU_parameter_ref:
      gcc_unreachable ();
      break;

    default:
      /* Other codes have no operands. */
      break;
    }
}
2747 | |
2748 | void |
2749 | output_loc_sequence_raw (dw_loc_descr_ref loc) |
2750 | { |
2751 | while (1) |
2752 | { |
2753 | enum dwarf_location_atom opc = loc->dw_loc_opc; |
2754 | /* Output the opcode. */ |
2755 | if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31) |
2756 | { |
2757 | unsigned r = (opc - DW_OP_breg0); |
2758 | r = DWARF2_FRAME_REG_OUT (r, 1); |
2759 | gcc_assert (r <= 31); |
2760 | opc = (enum dwarf_location_atom) (DW_OP_breg0 + r); |
2761 | } |
2762 | else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31) |
2763 | { |
2764 | unsigned r = (opc - DW_OP_reg0); |
2765 | r = DWARF2_FRAME_REG_OUT (r, 1); |
2766 | gcc_assert (r <= 31); |
2767 | opc = (enum dwarf_location_atom) (DW_OP_reg0 + r); |
2768 | } |
2769 | /* Output the opcode. */ |
2770 | fprintf (stream: asm_out_file, format: "%#x" , opc); |
2771 | output_loc_operands_raw (loc); |
2772 | |
2773 | if (!loc->dw_loc_next) |
2774 | break; |
2775 | loc = loc->dw_loc_next; |
2776 | |
2777 | fputc (c: ',', stream: asm_out_file); |
2778 | } |
2779 | } |
2780 | |
2781 | static void |
2782 | build_breg_loc (struct dw_loc_descr_node **head, unsigned int regno) |
2783 | { |
2784 | if (regno <= 31) |
2785 | add_loc_descr (list_head: head, descr: new_loc_descr (op: (enum dwarf_location_atom) |
2786 | (DW_OP_breg0 + regno), oprnd1: 0, oprnd2: 0)); |
2787 | else |
2788 | add_loc_descr (list_head: head, descr: new_loc_descr (op: DW_OP_bregx, oprnd1: regno, oprnd2: 0)); |
2789 | } |
2790 | |
2791 | /* Build a dwarf location for a cfa_reg spanning multiple |
2792 | consecutive registers. */ |
2793 | |
2794 | struct dw_loc_descr_node * |
2795 | build_span_loc (struct cfa_reg reg) |
2796 | { |
2797 | struct dw_loc_descr_node *head = NULL; |
2798 | |
2799 | gcc_assert (reg.span_width > 0); |
2800 | gcc_assert (reg.span > 1); |
2801 | |
2802 | /* Start from the highest number register as it goes in the upper bits. */ |
2803 | unsigned int regno = reg.reg + reg.span - 1; |
2804 | build_breg_loc (head: &head, regno); |
2805 | |
2806 | /* Deal with the remaining registers in the span. */ |
2807 | for (int i = reg.span - 2; i >= 0; i--) |
2808 | { |
2809 | add_loc_descr (list_head: &head, descr: int_loc_descriptor (reg.span_width * 8)); |
2810 | add_loc_descr (list_head: &head, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0)); |
2811 | regno--; |
2812 | build_breg_loc (head: &head, regno); |
2813 | add_loc_descr (list_head: &head, descr: new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0)); |
2814 | } |
2815 | return head; |
2816 | } |
2817 | |
2818 | /* This function builds a dwarf location descriptor sequence from a |
2819 | dw_cfa_location, adding the given OFFSET to the result of the |
2820 | expression. */ |
2821 | |
2822 | struct dw_loc_descr_node * |
2823 | build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset) |
2824 | { |
2825 | struct dw_loc_descr_node *head, *tmp; |
2826 | |
2827 | offset += cfa->offset; |
2828 | |
2829 | if (cfa->reg.span > 1) |
2830 | { |
2831 | head = build_span_loc (reg: cfa->reg); |
2832 | |
2833 | if (maybe_ne (a: offset, b: 0)) |
2834 | loc_descr_plus_const (list_head: &head, poly_offset: offset); |
2835 | } |
2836 | else if (cfa->indirect) |
2837 | { |
2838 | head = new_reg_loc_descr (reg: cfa->reg.reg, offset: cfa->base_offset); |
2839 | head->dw_loc_oprnd1.val_class = dw_val_class_const; |
2840 | head->dw_loc_oprnd1.val_entry = NULL; |
2841 | tmp = new_loc_descr (op: DW_OP_deref, oprnd1: 0, oprnd2: 0); |
2842 | add_loc_descr (list_head: &head, descr: tmp); |
2843 | loc_descr_plus_const (list_head: &head, poly_offset: offset); |
2844 | } |
2845 | else |
2846 | head = new_reg_loc_descr (reg: cfa->reg.reg, offset); |
2847 | |
2848 | return head; |
2849 | } |
2850 | |
2851 | /* This function builds a dwarf location descriptor sequence for |
2852 | the address at OFFSET from the CFA when stack is aligned to |
2853 | ALIGNMENT byte. */ |
2854 | |
2855 | struct dw_loc_descr_node * |
2856 | build_cfa_aligned_loc (dw_cfa_location *cfa, |
2857 | poly_int64 offset, HOST_WIDE_INT alignment) |
2858 | { |
2859 | struct dw_loc_descr_node *head; |
2860 | unsigned int dwarf_fp |
2861 | = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM); |
2862 | |
2863 | /* When CFA is defined as FP+OFFSET, emulate stack alignment. */ |
2864 | if (cfa->reg.reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0) |
2865 | { |
2866 | head = new_reg_loc_descr (reg: dwarf_fp, offset: 0); |
2867 | add_loc_descr (list_head: &head, descr: int_loc_descriptor (alignment)); |
2868 | add_loc_descr (list_head: &head, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0)); |
2869 | loc_descr_plus_const (list_head: &head, poly_offset: offset); |
2870 | } |
2871 | else |
2872 | head = new_reg_loc_descr (reg: dwarf_fp, offset); |
2873 | return head; |
2874 | } |
2875 | |
/* And now, the support for symbolic debugging information.  */

/* .debug_str support.  */

/* Forward declarations of the debug-hook implementations; these are
   installed in the dwarf2_debug_hooks table defined below.  */
static void dwarf2out_init (const char *);
static void dwarf2out_finish (const char *);
static void dwarf2out_early_finish (const char *);
static void dwarf2out_assembly_start (void);
static void dwarf2out_define (unsigned int, const char *);
static void dwarf2out_undef (unsigned int, const char *);
static void dwarf2out_start_source_file (unsigned, const char *);
static void dwarf2out_end_source_file (unsigned);
static void dwarf2out_function_decl (tree);
static void dwarf2out_begin_block (unsigned, unsigned);
static void dwarf2out_end_block (unsigned, unsigned);
static bool dwarf2out_ignore_block (const_tree);
static void dwarf2out_set_ignored_loc (unsigned, unsigned, const char *);
static void dwarf2out_early_global_decl (tree);
static void dwarf2out_late_global_decl (tree);
static void dwarf2out_type_decl (tree, int);
static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
						 dw_die_ref);
static void dwarf2out_abstract_function (tree);
static void dwarf2out_var_location (rtx_insn *);
static void dwarf2out_inline_entry (tree);
static void dwarf2out_size_function (tree);
static void dwarf2out_begin_function (tree);
static void dwarf2out_end_function (unsigned int);
static void dwarf2out_register_main_translation_unit (tree unit);
static void dwarf2out_set_name (tree, tree);
static void dwarf2out_register_external_die (tree decl, const char *sym,
					     unsigned HOST_WIDE_INT off);
static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
					unsigned HOST_WIDE_INT *off);
2911 | |
2912 | /* The debug hooks structure. */ |
2913 | |
2914 | const struct gcc_debug_hooks dwarf2_debug_hooks = |
2915 | { |
2916 | .init: dwarf2out_init, |
2917 | .finish: dwarf2out_finish, |
2918 | .early_finish: dwarf2out_early_finish, |
2919 | .assembly_start: dwarf2out_assembly_start, |
2920 | .define: dwarf2out_define, |
2921 | .undef: dwarf2out_undef, |
2922 | .start_source_file: dwarf2out_start_source_file, |
2923 | .end_source_file: dwarf2out_end_source_file, |
2924 | .begin_block: dwarf2out_begin_block, |
2925 | .end_block: dwarf2out_end_block, |
2926 | .ignore_block: dwarf2out_ignore_block, |
2927 | .source_line: dwarf2out_source_line, |
2928 | .set_ignored_loc: dwarf2out_set_ignored_loc, |
2929 | .begin_prologue: dwarf2out_begin_prologue, |
2930 | #if VMS_DEBUGGING_INFO |
2931 | dwarf2out_vms_end_prologue, |
2932 | dwarf2out_vms_begin_epilogue, |
2933 | #else |
2934 | .end_prologue: debug_nothing_int_charstar, |
2935 | .begin_epilogue: debug_nothing_int_charstar, |
2936 | #endif |
2937 | .end_epilogue: dwarf2out_end_epilogue, |
2938 | .begin_function: dwarf2out_begin_function, |
2939 | .end_function: dwarf2out_end_function, /* end_function */ |
2940 | .register_main_translation_unit: dwarf2out_register_main_translation_unit, |
2941 | .function_decl: dwarf2out_function_decl, /* function_decl */ |
2942 | .early_global_decl: dwarf2out_early_global_decl, |
2943 | .late_global_decl: dwarf2out_late_global_decl, |
2944 | .type_decl: dwarf2out_type_decl, /* type_decl */ |
2945 | .imported_module_or_decl: dwarf2out_imported_module_or_decl, |
2946 | .die_ref_for_decl: dwarf2out_die_ref_for_decl, |
2947 | .register_external_die: dwarf2out_register_external_die, |
2948 | .deferred_inline_function: debug_nothing_tree, /* deferred_inline_function */ |
2949 | /* The DWARF 2 backend tries to reduce debugging bloat by not |
2950 | emitting the abstract description of inline functions until |
2951 | something tries to reference them. */ |
2952 | .outlining_inline_function: dwarf2out_abstract_function, /* outlining_inline_function */ |
2953 | .label: debug_nothing_rtx_code_label, /* label */ |
2954 | .handle_pch: debug_nothing_int, /* handle_pch */ |
2955 | .var_location: dwarf2out_var_location, |
2956 | .inline_entry: dwarf2out_inline_entry, /* inline_entry */ |
2957 | .size_function: dwarf2out_size_function, /* size_function */ |
2958 | .switch_text_section: dwarf2out_switch_text_section, |
2959 | .set_name: dwarf2out_set_name, |
2960 | .start_end_main_source_file: 1, /* start_end_main_source_file */ |
2961 | TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */ |
2962 | }; |
2963 | |
2964 | const struct gcc_debug_hooks dwarf2_lineno_debug_hooks = |
2965 | { |
2966 | .init: dwarf2out_init, |
2967 | .finish: debug_nothing_charstar, |
2968 | .early_finish: debug_nothing_charstar, |
2969 | .assembly_start: dwarf2out_assembly_start, |
2970 | .define: debug_nothing_int_charstar, |
2971 | .undef: debug_nothing_int_charstar, |
2972 | .start_source_file: debug_nothing_int_charstar, |
2973 | .end_source_file: debug_nothing_int, |
2974 | .begin_block: debug_nothing_int_int, /* begin_block */ |
2975 | .end_block: debug_nothing_int_int, /* end_block */ |
2976 | .ignore_block: debug_true_const_tree, /* ignore_block */ |
2977 | .source_line: dwarf2out_source_line, /* source_line */ |
2978 | .set_ignored_loc: debug_nothing_int_int_charstar, /* set_ignored_loc */ |
2979 | .begin_prologue: debug_nothing_int_int_charstar, /* begin_prologue */ |
2980 | .end_prologue: debug_nothing_int_charstar, /* end_prologue */ |
2981 | .begin_epilogue: debug_nothing_int_charstar, /* begin_epilogue */ |
2982 | .end_epilogue: debug_nothing_int_charstar, /* end_epilogue */ |
2983 | .begin_function: debug_nothing_tree, /* begin_function */ |
2984 | .end_function: debug_nothing_int, /* end_function */ |
2985 | .register_main_translation_unit: debug_nothing_tree, /* register_main_translation_unit */ |
2986 | .function_decl: debug_nothing_tree, /* function_decl */ |
2987 | .early_global_decl: debug_nothing_tree, /* early_global_decl */ |
2988 | .late_global_decl: debug_nothing_tree, /* late_global_decl */ |
2989 | .type_decl: debug_nothing_tree_int, /* type_decl */ |
2990 | .imported_module_or_decl: debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */ |
2991 | .die_ref_for_decl: debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */ |
2992 | .register_external_die: debug_nothing_tree_charstar_uhwi, /* register_external_die */ |
2993 | .deferred_inline_function: debug_nothing_tree, /* deferred_inline_function */ |
2994 | .outlining_inline_function: debug_nothing_tree, /* outlining_inline_function */ |
2995 | .label: debug_nothing_rtx_code_label, /* label */ |
2996 | .handle_pch: debug_nothing_int, /* handle_pch */ |
2997 | .var_location: debug_nothing_rtx_insn, /* var_location */ |
2998 | .inline_entry: debug_nothing_tree, /* inline_entry */ |
2999 | .size_function: debug_nothing_tree, /* size_function */ |
3000 | .switch_text_section: debug_nothing_void, /* switch_text_section */ |
3001 | .set_name: debug_nothing_tree_tree, /* set_name */ |
3002 | .start_end_main_source_file: 0, /* start_end_main_source_file */ |
3003 | TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */ |
3004 | }; |
3005 | |
3006 | /* NOTE: In the comments in this file, many references are made to |
3007 | "Debugging Information Entries". This term is abbreviated as `DIE' |
3008 | throughout the remainder of this file. */ |
3009 | |
3010 | /* An internal representation of the DWARF output is built, and then |
3011 | walked to generate the DWARF debugging info. The walk of the internal |
3012 | representation is done after the entire program has been compiled. |
3013 | The types below are used to describe the internal representation. */ |
3014 | |
3015 | /* Whether to put type DIEs into their own section .debug_types instead |
3016 | of making them part of the .debug_info section. Only supported for |
3017 | Dwarf V4 or higher and the user didn't disable them through |
3018 | -fno-debug-types-section. It is more efficient to put them in a |
3019 | separate comdat sections since the linker will then be able to |
3020 | remove duplicates. But not all tools support .debug_types sections |
3021 | yet. For Dwarf V5 or higher .debug_types doesn't exist any more, |
3022 | it is DW_UT_type unit type in .debug_info section. For late LTO |
3023 | debug there should be almost no types emitted so avoid enabling |
3024 | -fdebug-types-section there. */ |
3025 | |
3026 | #define use_debug_types (dwarf_version >= 4 \ |
3027 | && flag_debug_types_section \ |
3028 | && !in_lto_p) |
3029 | |
3030 | /* Various DIE's use offsets relative to the beginning of the |
3031 | .debug_info section to refer to each other. */ |
3032 | |
3033 | typedef long int dw_offset; |
3034 | |
3035 | struct comdat_type_node; |
3036 | |
3037 | /* The entries in the line_info table more-or-less mirror the opcodes |
3038 | that are used in the real dwarf line table. Arrays of these entries |
3039 | are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not |
3040 | supported. */ |
3041 | |
3042 | enum dw_line_info_opcode { |
3043 | /* Emit DW_LNE_set_address; the operand is the label index. */ |
3044 | LI_set_address, |
3045 | |
3046 | /* Emit a row to the matrix with the given line. This may be done |
3047 | via any combination of DW_LNS_copy, DW_LNS_advance_line, and |
3048 | special opcodes. */ |
3049 | LI_set_line, |
3050 | |
3051 | /* Emit a DW_LNS_set_file. */ |
3052 | LI_set_file, |
3053 | |
3054 | /* Emit a DW_LNS_set_column. */ |
3055 | LI_set_column, |
3056 | |
3057 | /* Emit a DW_LNS_negate_stmt; the operand is ignored. */ |
3058 | LI_negate_stmt, |
3059 | |
3060 | /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */ |
3061 | LI_set_prologue_end, |
3062 | LI_set_epilogue_begin, |
3063 | |
3064 | /* Emit a DW_LNE_set_discriminator. */ |
3065 | LI_set_discriminator, |
3066 | |
3067 | /* Output a Fixed Advance PC; the target PC is the label index; the |
3068 | base PC is the previous LI_adv_address or LI_set_address entry. |
3069 | We only use this when emitting debug views without assembler |
3070 | support, at explicit user request. Ideally, we should only use |
3071 | it when the offset might be zero but we can't tell: it's the only |
3072 | way to maybe change the PC without resetting the view number. */ |
3073 | LI_adv_address |
3074 | }; |
3075 | |
/* One element of a collected line table: an action opcode plus its
   single operand.  */
typedef struct GTY(()) dw_line_info_struct {
  /* Which action to emit; see enum dw_line_info_opcode above.  */
  enum dw_line_info_opcode opcode;
  /* The opcode's operand; its meaning depends on OPCODE (label index,
     line, file, column or discriminator value).  */
  unsigned int val;
} dw_line_info_entry;
3080 | |
3081 | |
struct GTY(()) dw_line_info_table {
  /* The label that marks the end of this section.  */
  const char *end_label;

  /* The values for the last row of the matrix, as collected in the table.
     These are used to minimize the changes to the next row.  */
  unsigned int file_num;
  unsigned int line_num;
  unsigned int column_num;
  int discrim_num;
  bool is_stmt;
  /* NOTE(review): appears to flag whether this table has been used at
     all — confirm at the call sites.  */
  bool in_use;

  /* This denotes the NEXT view number.

     If it is 0, it is known that the NEXT view will be the first view
     at the given PC.

     If it is -1, we're forcing the view number to be reset, e.g. at a
     function entry.

     The meaning of other nonzero values depends on whether we're
     computing views internally or leaving it for the assembler to do
     so.  If we're emitting them internally, view denotes the view
     number since the last known advance of PC.  If we're leaving it
     for the assembler, it denotes the LVU label number that we're
     going to ask the assembler to assign.  */
  var_loc_view view;

  /* This counts the number of symbolic views emitted in this table
     since the latest view reset.  Its max value, over all tables,
     sets symview_upper_bound.  */
  var_loc_view symviews_since_reset;

/* Helpers to set and test the NEXT view number stored in a table's
   VIEW field (see the comment on that field above).  */
#define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
#define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
#define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
#define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))

  /* The line table entries collected for this section.  */
  vec<dw_line_info_entry, va_gc> *entries;
};
3123 | |
3124 | /* This is an upper bound for view numbers that the assembler may |
3125 | assign to symbolic views output in this translation. It is used to |
3126 | decide how big a field to use to represent view numbers in |
3127 | symview-classed attributes. */ |
3128 | |
3129 | static var_loc_view symview_upper_bound; |
3130 | |
3131 | /* If we're keep track of location views and their reset points, and |
3132 | INSN is a reset point (i.e., it necessarily advances the PC), mark |
3133 | the next view in TABLE as reset. */ |
3134 | |
3135 | static void |
3136 | maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table) |
3137 | { |
3138 | if (!debug_internal_reset_location_views) |
3139 | return; |
3140 | |
3141 | /* Maybe turn (part of?) this test into a default target hook. */ |
3142 | int reset = 0; |
3143 | |
3144 | if (targetm.reset_location_view) |
3145 | reset = targetm.reset_location_view (insn); |
3146 | |
3147 | if (reset) |
3148 | ; |
3149 | else if (JUMP_TABLE_DATA_P (insn)) |
3150 | reset = 1; |
3151 | else if (GET_CODE (insn) == USE |
3152 | || GET_CODE (insn) == CLOBBER |
3153 | || GET_CODE (insn) == ASM_INPUT |
3154 | || asm_noperands (insn) >= 0) |
3155 | ; |
3156 | else if (get_attr_min_length (insn) > 0) |
3157 | reset = 1; |
3158 | |
3159 | if (reset > 0 && !RESETTING_VIEW_P (table->view)) |
3160 | RESET_NEXT_VIEW (table->view); |
3161 | } |
3162 | |
/* The Debugging Information Entry (DIE) structure.  DIEs form a tree.
   The children of each node form a circular list linked by
   die_sib.  die_child points to the node *before* the "first" child node.  */

typedef struct GTY((chain_circular ("%h.die_sib" ), for_user)) die_struct {
  /* Either the symbol naming this DIE or, when comdat_type_p is set,
     the comdat type unit it belongs to.  */
  union die_symbol_or_type_node
  {
    const char * GTY ((tag ("0" ))) die_symbol;
    comdat_type_node *GTY ((tag ("1" ))) die_type_node;
  }
  GTY ((desc ("%0.comdat_type_p" ))) die_id;
  /* The attributes attached to this DIE.  */
  vec<dw_attr_node, va_gc> *die_attr;
  /* Tree links: parent, the node before the "first" child (see the
     comment above), and the circular sibling chain.  */
  dw_die_ref die_parent;
  dw_die_ref die_child;
  dw_die_ref die_sib;
  dw_die_ref die_definition; /* ref from a specification to its definition */
  /* Offset of this DIE within its debug section.  */
  dw_offset die_offset;
  /* Abbreviation table index assigned to this DIE.  */
  unsigned long die_abbrev;
  /* Scratch mark for traversals over the DIE graph.  */
  int die_mark;
  /* DECL_UID of the decl this DIE describes.  */
  unsigned int decl_id;
  /* The DWARF tag (DW_TAG_*) of this DIE.  */
  enum dwarf_tag die_tag;
  /* Die is used and must not be pruned as unused.  */
  BOOL_BITFIELD die_perennial_p : 1;
  BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
  /* For an external ref to die_symbol if die_offset contains an extra
     offset to that symbol.  */
  BOOL_BITFIELD with_offset : 1;
  /* Whether this DIE was removed from the DIE tree, for example via
     prune_unused_types.  We don't consider those present from the
     DIE lookup routines.  */
  BOOL_BITFIELD removed : 1;
  /* Lots of spare bits.  */
}
die_node;
3197 | |
3198 | /* Set to TRUE while dwarf2out_early_global_decl is running. */ |
3199 | static bool early_dwarf; |
3200 | static bool early_dwarf_finished; |
3201 | class set_early_dwarf { |
3202 | public: |
3203 | bool saved; |
3204 | set_early_dwarf () : saved(early_dwarf) |
3205 | { |
3206 | gcc_assert (! early_dwarf_finished); |
3207 | early_dwarf = true; |
3208 | } |
3209 | ~set_early_dwarf () { early_dwarf = saved; } |
3210 | }; |
3211 | |
3212 | /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */ |
3213 | #define FOR_EACH_CHILD(die, c, expr) do { \ |
3214 | c = die->die_child; \ |
3215 | if (c) do { \ |
3216 | c = c->die_sib; \ |
3217 | expr; \ |
3218 | } while (c != die->die_child); \ |
3219 | } while (0) |
3220 | |
/* The pubname structure: associates a name with the DIE it refers to,
   for the public names/types tables.  */

typedef struct GTY(()) pubname_struct {
  /* The DIE this entry points at.  */
  dw_die_ref die;
  /* The name under which the DIE is listed.  */
  const char *name;
}
pubname_entry;
3228 | |
3229 | |
3230 | struct GTY(()) dw_ranges { |
3231 | const char *label; |
3232 | /* If this is positive, it's a block number, otherwise it's a |
3233 | bitwise-negated index into dw_ranges_by_label. */ |
3234 | int num; |
3235 | /* If idx is equal to DW_RANGES_IDX_SKELETON, it should be emitted |
3236 | into .debug_rnglists section rather than .debug_rnglists.dwo |
3237 | for -gsplit-dwarf and DWARF >= 5. */ |
3238 | #define DW_RANGES_IDX_SKELETON ((1U << 31) - 1) |
3239 | /* Index for the range list for DW_FORM_rnglistx. */ |
3240 | unsigned int idx : 31; |
3241 | /* True if this range might be possibly in a different section |
3242 | from previous entry. */ |
3243 | unsigned int maybe_new_sec : 1; |
3244 | addr_table_entry *begin_entry; |
3245 | addr_table_entry *end_entry; |
3246 | }; |
3247 | |
3248 | /* A structure to hold a macinfo entry. */ |
3249 | |
3250 | typedef struct GTY(()) macinfo_struct { |
3251 | unsigned char code; |
3252 | unsigned HOST_WIDE_INT lineno; |
3253 | const char *info; |
3254 | } |
3255 | macinfo_entry; |
3256 | |
3257 | |
3258 | struct GTY(()) dw_ranges_by_label { |
3259 | const char *begin; |
3260 | const char *end; |
3261 | }; |
3262 | |
/* The comdat type node structure: one separated comdat type unit.  */
struct GTY(()) comdat_type_node
{
  /* The root DIE of this type unit.  */
  dw_die_ref root_die;
  /* The DIE of the type itself.  */
  dw_die_ref type_die;
  /* NOTE(review): presumably the stub DIE left behind in the main unit
     referring to TYPE_DIE — confirm at uses.  */
  dw_die_ref skeleton_die;
  /* The type signature identifying this unit.  */
  char signature[DWARF_TYPE_SIGNATURE_SIZE];
  /* Next node in the list of comdat type units.  */
  comdat_type_node *next;
};
3272 | |
3273 | /* A list of DIEs for which we can't determine ancestry (parent_die |
3274 | field) just yet. Later in dwarf2out_finish we will fill in the |
3275 | missing bits. */ |
3276 | typedef struct GTY(()) limbo_die_struct { |
3277 | dw_die_ref die; |
3278 | /* The tree for which this DIE was created. We use this to |
3279 | determine ancestry later. */ |
3280 | tree created_for; |
3281 | struct limbo_die_struct *next; |
3282 | } |
3283 | limbo_die_node; |
3284 | |
3285 | typedef struct skeleton_chain_struct |
3286 | { |
3287 | dw_die_ref old_die; |
3288 | dw_die_ref new_die; |
3289 | struct skeleton_chain_struct *parent; |
3290 | } |
3291 | skeleton_chain_node; |
3292 | |
3293 | /* Define a macro which returns nonzero for a TYPE_DECL which was |
3294 | implicitly generated for a type. |
3295 | |
3296 | Note that, unlike the C front-end (which generates a NULL named |
3297 | TYPE_DECL node for each complete tagged type, each array type, |
3298 | and each function type node created) the C++ front-end generates |
3299 | a _named_ TYPE_DECL node for each tagged type node created. |
3300 | These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to |
3301 | generate a DW_TAG_typedef DIE for them. Likewise with the Ada |
3302 | front-end, but for each type, tagged or not. */ |
3303 | |
3304 | #define TYPE_DECL_IS_STUB(decl) \ |
3305 | (DECL_NAME (decl) == NULL_TREE \ |
3306 | || (DECL_ARTIFICIAL (decl) \ |
3307 | && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \ |
3308 | /* This is necessary for stub decls that \ |
3309 | appear in nested inline functions. */ \ |
3310 | || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \ |
3311 | && (decl_ultimate_origin (decl) \ |
3312 | == TYPE_STUB_DECL (TREE_TYPE (decl))))))) |
3313 | |
3314 | /* Information concerning the compilation unit's programming |
3315 | language, and compiler version. */ |
3316 | |
/* Fixed size portion of the DWARF compilation unit header: initial
   length, version (2 bytes) plus, for DWARF 5, unit type and address
   size bytes (for earlier versions, the address size byte only), and
   the abbrev table offset.  The name was missing from this #define;
   it is the one referenced by the two unit-header macros below.  */
#define DWARF_COMPILE_UNIT_HEADER_SIZE				\
  (DWARF_INITIAL_LENGTH_SIZE + dwarf_offset_size		\
   + (dwarf_version >= 5 ? 4 : 3))
3321 | |
/* Fixed size portion of the DWARF comdat type unit header: the
   compile unit header plus the type signature and the type DIE
   offset.  The macro name was missing from this #define.  */
#define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE			\
  (DWARF_COMPILE_UNIT_HEADER_SIZE				\
   + DWARF_TYPE_SIGNATURE_SIZE + dwarf_offset_size)
3326 | |
/* Fixed size portion of the DWARF skeleton compilation unit header
   (DWARF 5 adds an 8-byte DWO id).  The macro name was missing from
   this #define.  */
#define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE			\
  (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3330 | |
/* Fixed size portion of public names info.  The macro name was
   missing from this #define.  */
#define DWARF_PUBNAMES_HEADER_SIZE (2 * dwarf_offset_size + 2)
3333 | |
/* Fixed size portion of the address range info, rounded up to twice
   the address size as required for the subsequent entries (compare
   DWARF_ARANGES_PAD_SIZE below).  The macro name was missing from
   this #define.  */
#define DWARF_ARANGES_HEADER_SIZE					\
  (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + dwarf_offset_size + 4,	\
		DWARF2_ADDR_SIZE * 2)					\
   - DWARF_INITIAL_LENGTH_SIZE)
3339 | |
3340 | /* Size of padding portion in the address range info. It must be |
3341 | aligned to twice the pointer size. */ |
3342 | #define DWARF_ARANGES_PAD_SIZE \ |
3343 | (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + dwarf_offset_size + 4, \ |
3344 | DWARF2_ADDR_SIZE * 2) \ |
3345 | - (DWARF_INITIAL_LENGTH_SIZE + dwarf_offset_size + 4)) |
3346 | |
3347 | /* Use assembler line directives if available. */ |
3348 | #ifndef DWARF2_ASM_LINE_DEBUG_INFO |
3349 | #ifdef HAVE_AS_DWARF2_DEBUG_LINE |
3350 | #define DWARF2_ASM_LINE_DEBUG_INFO 1 |
3351 | #else |
3352 | #define DWARF2_ASM_LINE_DEBUG_INFO 0 |
3353 | #endif |
3354 | #endif |
3355 | |
3356 | /* Use assembler views in line directives if available. */ |
3357 | #ifndef DWARF2_ASM_VIEW_DEBUG_INFO |
3358 | #ifdef HAVE_AS_DWARF2_DEBUG_VIEW |
3359 | #define DWARF2_ASM_VIEW_DEBUG_INFO 1 |
3360 | #else |
3361 | #define DWARF2_ASM_VIEW_DEBUG_INFO 0 |
3362 | #endif |
3363 | #endif |
3364 | |
/* Return true if GCC configure detected assembler support for .loc.  */

bool
dwarf2out_default_as_loc_support (void)
{
  return DWARF2_ASM_LINE_DEBUG_INFO;
  /* These preprocessor directives take effect at compile time even
     though they follow the return statement: this is the single
     sanctioned use of the macro, and poisoning it here makes any
     later reference a compile-time error.  */
#if (GCC_VERSION >= 3000)
# undef DWARF2_ASM_LINE_DEBUG_INFO
# pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
#endif
}
3376 | |
/* Return true if GCC configure detected assembler support for views
   in .loc directives.  */

bool
dwarf2out_default_as_locview_support (void)
{
  return DWARF2_ASM_VIEW_DEBUG_INFO;
  /* Compile-time directives despite following the return: poison the
     macro after this single sanctioned use so later references fail
     to compile.  */
#if (GCC_VERSION >= 3000)
# undef DWARF2_ASM_VIEW_DEBUG_INFO
# pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
#endif
}
3389 | |
3390 | /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported |
3391 | view computation, and it refers to a view identifier for which we |
3392 | will not emit a label because it is known to map to a view number |
3393 | zero. We won't allocate the bitmap if we're not using assembler |
3394 | support for location views, but we have to make the variable |
3395 | visible for GGC and for code that will be optimized out for lack of |
3396 | support but that's still parsed and compiled. We could abstract it |
3397 | out with macros, but it's not worth it. */ |
3398 | static GTY(()) bitmap zero_view_p; |
3399 | |
3400 | /* Evaluate to TRUE iff N is known to identify the first location view |
3401 | at its PC. When not using assembler location view computation, |
3402 | that must be view number zero. Otherwise, ZERO_VIEW_P is allocated |
3403 | and views label numbers recorded in it are the ones known to be |
3404 | zero. */ |
3405 | #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \ |
3406 | || (N) == (var_loc_view)-1 \ |
3407 | || (zero_view_p \ |
3408 | && bitmap_bit_p (zero_view_p, (N)))) |
3409 | |
3410 | /* Return true iff we're to emit .loc directives for the assembler to |
3411 | generate line number sections. |
3412 | |
3413 | When we're not emitting views, all we need from the assembler is |
3414 | support for .loc directives. |
3415 | |
3416 | If we are emitting views, we can only use the assembler's .loc |
3417 | support if it also supports views. |
3418 | |
3419 | When the compiler is emitting the line number programs and |
3420 | computing view numbers itself, it resets view numbers at known PC |
3421 | changes and counts from that, and then it emits view numbers as |
3422 | literal constants in locviewlists. There are cases in which the |
3423 | compiler is not sure about PC changes, e.g. when extra alignment is |
3424 | requested for a label. In these cases, the compiler may not reset |
3425 | the view counter, and the potential PC advance in the line number |
3426 | program will use an opcode that does not reset the view counter |
3427 | even if the PC actually changes, so that compiler and debug info |
3428 | consumer can keep view numbers in sync. |
3429 | |
3430 | When the compiler defers view computation to the assembler, it |
3431 | emits symbolic view numbers in locviewlists, with the exception of |
3432 | views known to be zero (forced resets, or reset after |
3433 | compiler-visible PC changes): instead of emitting symbols for |
3434 | these, we emit literal zero and assert the assembler agrees with |
3435 | the compiler's assessment. We could use symbolic views everywhere, |
3436 | instead of special-casing zero views, but then we'd be unable to |
3437 | optimize out locviewlists that contain only zeros. */ |
3438 | |
3439 | static bool |
3440 | output_asm_line_debug_info (void) |
3441 | { |
3442 | return (dwarf2out_as_loc_support |
3443 | && (dwarf2out_as_locview_support |
3444 | || !debug_variable_location_views)); |
3445 | } |
3446 | |
3447 | static bool asm_outputs_debug_line_str (void); |
3448 | |
3449 | /* Minimum line offset in a special line info. opcode. |
3450 | This value was chosen to give a reasonable range of values. */ |
3451 | #define DWARF_LINE_BASE -10 |
3452 | |
3453 | /* First special line opcode - leave room for the standard opcodes. */ |
3454 | #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1) |
3455 | |
3456 | /* Range of line offsets in a special line info. opcode. */ |
3457 | #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1) |
3458 | |
3459 | /* Flag that indicates the initial value of the is_stmt_start flag. |
3460 | In the present implementation, we do not mark any lines as |
3461 | the beginning of a source statement, because that information |
3462 | is not made available by the GCC front-end. */ |
3463 | #define DWARF_LINE_DEFAULT_IS_STMT_START 1 |
3464 | |
3465 | /* Maximum number of operations per instruction bundle. */ |
3466 | #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN |
3467 | #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1 |
3468 | #endif |
3469 | |
3470 | /* This location is used by calc_die_sizes() to keep track |
3471 | the offset of each DIE within the .debug_info section. */ |
3472 | static unsigned long next_die_offset; |
3473 | |
3474 | /* Record the root of the DIE's built for the current compilation unit. */ |
3475 | static GTY(()) dw_die_ref single_comp_unit_die; |
3476 | |
3477 | /* A list of type DIEs that have been separated into comdat sections. */ |
3478 | static GTY(()) comdat_type_node *comdat_type_list; |
3479 | |
3480 | /* A list of CU DIEs that have been separated. */ |
3481 | static GTY(()) limbo_die_node *cu_die_list; |
3482 | |
3483 | /* A list of DIEs with a NULL parent waiting to be relocated. */ |
3484 | static GTY(()) limbo_die_node *limbo_die_list; |
3485 | |
3486 | /* A list of DIEs for which we may have to generate |
3487 | DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */ |
3488 | static GTY(()) limbo_die_node *deferred_asm_name; |
3489 | |
3490 | struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data> |
3491 | { |
3492 | typedef const char *compare_type; |
3493 | |
3494 | static hashval_t hash (dwarf_file_data *); |
3495 | static bool equal (dwarf_file_data *, const char *); |
3496 | }; |
3497 | |
3498 | /* Filenames referenced by this compilation unit. */ |
3499 | static GTY(()) hash_table<dwarf_file_hasher> *file_table; |
3500 | |
/* Hasher for the decl -> DIE table below: DIEs are looked up by the
   tree declaration they describe.  */
struct decl_die_hasher : ggc_ptr_hash<die_node>
{
  typedef tree compare_type;

  static hashval_t hash (die_node *);
  static bool equal (die_node *, tree);
};
3508 | /* A hash table of references to DIE's that describe declarations. |
3509 | The key is a DECL_UID() which is a unique number identifying each decl. */ |
3510 | static GTY (()) hash_table<decl_die_hasher> *decl_die_table; |
3511 | |
/* Entry of variable_value_hash below: for one FUNCTION_DECL, the set
   of DIEs that refer into it via DW_OP_GNU_variable_value.  */
struct GTY ((for_user)) variable_value_struct {
  /* DECL_UID of the FUNCTION_DECL this entry is keyed on.  */
  unsigned int decl_id;
  /* DIEs containing DW_OP_GNU_variable_value references to VAR_DECLs
     whose DECL_CONTEXT is that function.  */
  vec<dw_die_ref, va_gc> *dies;
};
3516 | |
/* Hasher for variable_value_hash below: entries are looked up by the
   FUNCTION_DECL tree they are keyed on.  */
struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
{
  typedef tree compare_type;

  static hashval_t hash (variable_value_struct *);
  static bool equal (variable_value_struct *, tree);
};
3524 | /* A hash table of DIEs that contain DW_OP_GNU_variable_value with |
3525 | dw_val_class_decl_ref class, indexed by FUNCTION_DECLs which is |
3526 | DECL_CONTEXT of the referenced VAR_DECLs. */ |
3527 | static GTY (()) hash_table<variable_value_hasher> *variable_value_hash; |
3528 | |
/* Hasher for the COMMON block table below; both key lookup and
   equality operate directly on DIEs (see the key description below).  */
struct block_die_hasher : ggc_ptr_hash<die_struct>
{
  static hashval_t hash (die_struct *);
  static bool equal (die_struct *, die_struct *);
};
3534 | |
3535 | /* A hash table of references to DIE's that describe COMMON blocks. |
3536 | The key is DECL_UID() ^ die_parent. */ |
3537 | static GTY (()) hash_table<block_die_hasher> *common_block_die_table; |
3538 | |
/* A DIE paired with a tree argument; elements of
   tmpl_value_parm_die_table below.  */
typedef struct GTY(()) die_arg_entry_struct {
  dw_die_ref die;
  tree arg;
} die_arg_entry;
3543 | |
3544 | |
3545 | /* Node of the variable location list. */ |
struct GTY ((chain_next ("%h.next" ))) var_loc_node {
  /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
     EXPR_LIST chain.  For small bitsizes, bitsize is encoded
     in mode of the EXPR_LIST node and first EXPR_LIST operand
     is either NOTE_INSN_VAR_LOCATION for a piece with a known
     location or NULL for padding.  For larger bitsizes,
     mode is 0 and first operand is a CONCAT with bitsize
     as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
     NULL as second operand.  */
  rtx GTY (()) loc;
  /* Assembler label at which this location takes effect.  */
  const char * GTY (()) label;
  /* Next node in the chain (walked via the chain_next option above).  */
  struct var_loc_node * GTY (()) next;
  /* Location view associated with LABEL.  */
  var_loc_view view;
};
3560 | |
3561 | /* Variable location list. */ |
struct GTY ((for_user)) var_loc_list_def {
  /* Head of the chain of location notes for this variable.  */
  struct var_loc_node * GTY (()) first;

  /* Pointer to the last but one or last element of the
     chained list.  If the list is empty, both first and
     last are NULL, if the list contains just one node
     or the last node certainly is not redundant, it points
     to the last node, otherwise points to the last but one.
     Do not mark it for GC because it is marked through the chain.  */
  struct var_loc_node * GTY ((skip ("%h" ))) last;

  /* Pointer to the last element before section switch,
     if NULL, either sections weren't switched or first
     is after section switch.  */
  struct var_loc_node * GTY ((skip ("%h" ))) last_before_switch;

  /* DECL_UID of the variable decl.  */
  unsigned int decl_id;
};
typedef struct var_loc_list_def var_loc_list;
3582 | |
3583 | /* Call argument location list. */ |
/* One node per call site, chained through NEXT (see
   call_arg_locations below).  */
struct GTY ((chain_next ("%h.next" ))) call_arg_loc_node {
  /* RTX note describing the argument locations at this call.  */
  rtx GTY (()) call_arg_loc_note;
  /* Assembler label at the call site.  */
  const char * GTY (()) label;
  /* Lexical block containing the call.  */
  tree GTY (()) block;
  /* True if this call site is a tail call.  */
  bool tail_call_p;
  /* SYMBOL_REF of the callee, when known.  */
  rtx GTY (()) symbol_ref;
  /* Next call site in the chain.  */
  struct call_arg_loc_node * GTY (()) next;
};
3592 | |
3593 | |
/* Hasher for the decl location table below: location lists are looked
   up by the variable declaration they belong to.  */
struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
{
  typedef const_tree compare_type;

  static hashval_t hash (var_loc_list *);
  static bool equal (var_loc_list *, const_tree);
};
3601 | |
3602 | /* Table of decl location linked lists. */ |
3603 | static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table; |
3604 | |
3605 | /* Head and tail of call_arg_loc chain. */ |
3606 | static GTY (()) struct call_arg_loc_node *call_arg_locations; |
3607 | static struct call_arg_loc_node *call_arg_loc_last; |
3608 | |
3609 | /* Number of call sites in the current function. */ |
3610 | static int call_site_count = -1; |
3611 | /* Number of tail call sites in the current function. */ |
3612 | static int tail_call_site_count = -1; |
3613 | |
3614 | /* A cached location list. */ |
/* A cached location list, memoizing the location list computed for a
   given decl (see cached_dw_loc_list_table below).  */
struct GTY ((for_user)) cached_dw_loc_list_def {
  /* The DECL_UID of the decl that this entry describes.  */
  unsigned int decl_id;

  /* The cached location list.  */
  dw_loc_list_ref loc_list;
};
typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3623 | |
3624 | struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list> |
3625 | { |
3626 | |
3627 | typedef const_tree compare_type; |
3628 | |
3629 | static hashval_t hash (cached_dw_loc_list *); |
3630 | static bool equal (cached_dw_loc_list *, const_tree); |
3631 | }; |
3632 | |
3633 | /* Table of cached location lists. */ |
3634 | static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table; |
3635 | |
3636 | /* A vector of references to DIE's that are uniquely identified by their tag, |
3637 | presence/absence of children DIE's, and list of attribute/value pairs. */ |
3638 | static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table; |
3639 | |
3640 | /* A hash map to remember the stack usage for DWARF procedures. The value |
3641 | stored is the stack size difference between before the DWARF procedure |
3642 | invokation and after it returned. In other words, for a DWARF procedure |
3643 | that consumes N stack slots and that pushes M ones, this stores M - N. */ |
3644 | static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map; |
3645 | |
3646 | /* A global counter for generating labels for line number data. */ |
3647 | static unsigned int line_info_label_num; |
3648 | |
3649 | /* The current table to which we should emit line number information |
3650 | for the current function. This will be set up at the beginning of |
3651 | assembly for the function. */ |
3652 | static GTY(()) dw_line_info_table *cur_line_info_table; |
3653 | |
3654 | /* The two default tables of line number info. */ |
3655 | static GTY(()) dw_line_info_table *text_section_line_info; |
3656 | static GTY(()) dw_line_info_table *cold_text_section_line_info; |
3657 | |
3658 | /* The set of all non-default tables of line number info. */ |
3659 | static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info; |
3660 | |
3661 | /* A flag to tell pubnames/types export if there is an info section to |
3662 | refer to. */ |
3663 | static bool info_section_emitted; |
3664 | |
3665 | /* A pointer to the base of a table that contains a list of publicly |
3666 | accessible names. */ |
3667 | static GTY (()) vec<pubname_entry, va_gc> *pubname_table; |
3668 | |
3669 | /* A pointer to the base of a table that contains a list of publicly |
3670 | accessible types. */ |
3671 | static GTY (()) vec<pubname_entry, va_gc> *pubtype_table; |
3672 | |
3673 | /* A pointer to the base of a table that contains a list of macro |
3674 | defines/undefines (and file start/end markers). */ |
3675 | static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table; |
3676 | |
3677 | /* True if .debug_macinfo or .debug_macros section is going to be |
3678 | emitted. */ |
3679 | #define have_macinfo \ |
3680 | ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \ |
3681 | && debug_info_level >= DINFO_LEVEL_VERBOSE \ |
3682 | && !macinfo_table->is_empty ()) |
3683 | |
3684 | /* Vector of dies for which we should generate .debug_ranges info. */ |
3685 | static GTY (()) vec<dw_ranges, va_gc> *ranges_table; |
3686 | |
3687 | /* Vector of pairs of labels referenced in ranges_table. */ |
3688 | static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label; |
3689 | |
3690 | /* Whether we have location lists that need outputting */ |
3691 | static GTY(()) bool have_location_lists; |
3692 | |
3693 | /* Unique label counter. */ |
3694 | static GTY(()) unsigned int loclabel_num; |
3695 | |
3696 | /* Unique label counter for point-of-call tables. */ |
3697 | static GTY(()) unsigned int poc_label_num; |
3698 | |
3699 | /* The last file entry emitted by maybe_emit_file(). */ |
3700 | static GTY(()) struct dwarf_file_data * last_emitted_file; |
3701 | |
3702 | /* Number of internal labels generated by gen_internal_sym(). */ |
3703 | static GTY(()) int label_num; |
3704 | |
3705 | static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table; |
3706 | |
3707 | /* Instances of generic types for which we need to generate debug |
3708 | info that describe their generic parameters and arguments. That |
3709 | generation needs to happen once all types are properly laid out so |
3710 | we do it at the end of compilation. */ |
3711 | static GTY(()) vec<tree, va_gc> *generic_type_instances; |
3712 | |
3713 | /* Offset from the "steady-state frame pointer" to the frame base, |
3714 | within the current function. */ |
3715 | static poly_int64 frame_pointer_fb_offset; |
3716 | static bool frame_pointer_fb_offset_valid; |
3717 | |
3718 | static vec<dw_die_ref> base_types; |
3719 | |
3720 | /* Flags to represent a set of attribute classes for attributes that represent |
3721 | a scalar value (bounds, pointers, ...). */ |
/* Bit flags; values are powers of two so they can be OR-ed into a set.  */
enum dw_scalar_form
{
  /* Value may be emitted as an inline constant.  */
  dw_scalar_form_constant = 0x01,
  /* Value may be emitted as a DWARF expression (exprloc).  */
  dw_scalar_form_exprloc = 0x02,
  /* Value may be emitted as a reference to another DIE.  */
  dw_scalar_form_reference = 0x04
};
3728 | |
3729 | /* Forward declarations for functions defined in this file. */ |
3730 | |
3731 | static bool is_pseudo_reg (const_rtx); |
3732 | static tree type_main_variant (tree); |
3733 | static bool is_tagged_type (const_tree); |
3734 | static const char *dwarf_tag_name (unsigned); |
3735 | static const char *dwarf_attr_name (unsigned); |
3736 | static const char *dwarf_form_name (unsigned); |
3737 | static tree decl_ultimate_origin (const_tree); |
3738 | static tree decl_class_context (tree); |
3739 | static void add_dwarf_attr (dw_die_ref, dw_attr_node *); |
3740 | static inline unsigned int AT_index (dw_attr_node *); |
3741 | static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned); |
3742 | static inline unsigned AT_flag (dw_attr_node *); |
3743 | static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT); |
3744 | static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT); |
3745 | static void add_AT_double (dw_die_ref, enum dwarf_attribute, |
3746 | HOST_WIDE_INT, unsigned HOST_WIDE_INT); |
3747 | static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int, |
3748 | unsigned int, unsigned char *); |
3749 | static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *); |
3750 | static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *); |
3751 | static inline const char *AT_string (dw_attr_node *); |
3752 | static enum dwarf_form AT_string_form (dw_attr_node *); |
3753 | static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref); |
3754 | static void add_AT_specification (dw_die_ref, dw_die_ref); |
3755 | static inline dw_die_ref AT_ref (dw_attr_node *); |
3756 | static inline int AT_ref_external (dw_attr_node *); |
3757 | static inline void set_AT_ref_external (dw_attr_node *, int); |
3758 | static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref); |
3759 | static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute, |
3760 | dw_loc_list_ref); |
3761 | static inline dw_loc_list_ref AT_loc_list (dw_attr_node *); |
3762 | static void add_AT_view_list (dw_die_ref, enum dwarf_attribute); |
3763 | static inline dw_loc_list_ref AT_loc_list (dw_attr_node *); |
3764 | static addr_table_entry *add_addr_table_entry (void *, enum ate_kind); |
3765 | static void remove_addr_table_entry (addr_table_entry *); |
3766 | static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool); |
3767 | static inline rtx AT_addr (dw_attr_node *); |
3768 | static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *); |
3769 | static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *); |
3770 | static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *); |
3771 | static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *); |
3772 | static void add_AT_range_list (dw_die_ref, enum dwarf_attribute, |
3773 | unsigned long, bool); |
3774 | static inline const char *AT_lbl (dw_attr_node *); |
3775 | static const char *get_AT_low_pc (dw_die_ref); |
3776 | static bool is_c (void); |
3777 | static bool is_cxx (void); |
3778 | static bool is_cxx (const_tree); |
3779 | static bool is_fortran (void); |
3780 | static bool is_ada (void); |
3781 | static bool remove_AT (dw_die_ref, enum dwarf_attribute); |
3782 | static void remove_child_TAG (dw_die_ref, enum dwarf_tag); |
3783 | static void add_child_die (dw_die_ref, dw_die_ref); |
3784 | static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree); |
3785 | static dw_die_ref strip_naming_typedef (tree, dw_die_ref); |
3786 | static dw_die_ref lookup_type_die_strip_naming_typedef (tree); |
3787 | static void equate_type_number_to_die (tree, dw_die_ref); |
3788 | static var_loc_list *lookup_decl_loc (const_tree); |
3789 | static void equate_decl_number_to_die (tree, dw_die_ref); |
3790 | static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view); |
3791 | static void print_spaces (FILE *); |
3792 | static void print_die (dw_die_ref, FILE *); |
3793 | static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *); |
3794 | static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *); |
3795 | static void die_checksum (dw_die_ref, struct md5_ctx *, int *); |
3796 | static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *); |
3797 | static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *); |
3798 | static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *); |
3799 | static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *, |
3800 | struct md5_ctx *, int *); |
3801 | struct checksum_attributes; |
3802 | static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref); |
3803 | static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *); |
3804 | static void checksum_die_context (dw_die_ref, struct md5_ctx *); |
3805 | static void generate_type_signature (dw_die_ref, comdat_type_node *); |
3806 | static bool same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *); |
3807 | static bool same_dw_val_p (const dw_val_node *, const dw_val_node *, int *); |
3808 | static bool same_attr_p (dw_attr_node *, dw_attr_node *, int *); |
3809 | static bool same_die_p (dw_die_ref, dw_die_ref, int *); |
3810 | static bool is_type_die (dw_die_ref); |
3811 | static inline bool is_template_instantiation (dw_die_ref); |
3812 | static bool is_declaration_die (dw_die_ref); |
3813 | static bool should_move_die_to_comdat (dw_die_ref); |
3814 | static dw_die_ref clone_as_declaration (dw_die_ref); |
3815 | static dw_die_ref clone_die (dw_die_ref); |
3816 | static dw_die_ref clone_tree (dw_die_ref); |
3817 | static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref); |
3818 | static void generate_skeleton_ancestor_tree (skeleton_chain_node *); |
3819 | static void generate_skeleton_bottom_up (skeleton_chain_node *); |
3820 | static dw_die_ref generate_skeleton (dw_die_ref); |
3821 | static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref, |
3822 | dw_die_ref, |
3823 | dw_die_ref); |
3824 | static void break_out_comdat_types (dw_die_ref); |
3825 | static void copy_decls_for_unworthy_types (dw_die_ref); |
3826 | |
3827 | static void add_sibling_attributes (dw_die_ref); |
3828 | static void output_location_lists (dw_die_ref); |
3829 | static int constant_size (unsigned HOST_WIDE_INT); |
3830 | static unsigned long size_of_die (dw_die_ref); |
3831 | static void calc_die_sizes (dw_die_ref); |
3832 | static void calc_base_type_die_sizes (void); |
3833 | static void mark_dies (dw_die_ref); |
3834 | static void unmark_dies (dw_die_ref); |
3835 | static void unmark_all_dies (dw_die_ref); |
3836 | static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *); |
3837 | static unsigned long size_of_aranges (void); |
3838 | static enum dwarf_form value_format (dw_attr_node *); |
3839 | static void output_value_format (dw_attr_node *); |
3840 | static void output_abbrev_section (void); |
3841 | static void output_die_abbrevs (unsigned long, dw_die_ref); |
3842 | static void output_die (dw_die_ref); |
3843 | static void output_compilation_unit_header (enum dwarf_unit_type); |
3844 | static void output_comp_unit (dw_die_ref, int, const unsigned char *); |
3845 | static void output_comdat_type_unit (comdat_type_node *, bool); |
3846 | static const char *dwarf2_name (tree, int); |
3847 | static void add_pubname (tree, dw_die_ref); |
3848 | static void add_enumerator_pubname (const char *, dw_die_ref); |
3849 | static void add_pubname_string (const char *, dw_die_ref); |
3850 | static void add_pubtype (tree, dw_die_ref); |
3851 | static void output_pubnames (vec<pubname_entry, va_gc> *); |
3852 | static void output_aranges (void); |
3853 | static unsigned int add_ranges (const_tree, bool = false); |
3854 | static void add_ranges_by_labels (dw_die_ref, const char *, const char *, |
3855 | bool *, bool); |
3856 | static void output_ranges (void); |
3857 | static dw_line_info_table *new_line_info_table (void); |
3858 | static void output_line_info (bool); |
3859 | static void output_file_names (void); |
3860 | static bool is_base_type (tree); |
3861 | static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref); |
3862 | static int decl_quals (const_tree); |
3863 | static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref); |
3864 | static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref); |
3865 | static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref); |
3866 | static unsigned int debugger_reg_number (const_rtx); |
3867 | static void add_loc_descr_op_piece (dw_loc_descr_ref *, int); |
3868 | static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status); |
3869 | static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int, |
3870 | enum var_init_status); |
3871 | static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx, |
3872 | enum var_init_status); |
3873 | static dw_loc_descr_ref based_loc_descr (rtx, poly_int64, |
3874 | enum var_init_status); |
3875 | static bool is_based_loc (const_rtx); |
3876 | static bool resolve_one_addr (rtx *); |
3877 | static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx, |
3878 | enum var_init_status); |
3879 | static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode, |
3880 | enum var_init_status); |
3881 | struct loc_descr_context; |
3882 | static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref); |
3883 | static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list); |
3884 | static dw_loc_list_ref loc_list_from_tree (tree, int, |
3885 | struct loc_descr_context *); |
3886 | static dw_loc_descr_ref loc_descriptor_from_tree (tree, int, |
3887 | struct loc_descr_context *); |
3888 | static tree field_type (const_tree); |
3889 | static unsigned int simple_type_align_in_bits (const_tree); |
3890 | static unsigned int simple_decl_align_in_bits (const_tree); |
3891 | static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree); |
3892 | struct vlr_context; |
3893 | static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *, |
3894 | HOST_WIDE_INT *); |
3895 | static void add_AT_location_description (dw_die_ref, enum dwarf_attribute, |
3896 | dw_loc_list_ref); |
3897 | static void add_data_member_location_attribute (dw_die_ref, tree, |
3898 | struct vlr_context *); |
3899 | static bool add_const_value_attribute (dw_die_ref, machine_mode, rtx); |
3900 | static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *); |
3901 | static void insert_wide_int (const wide_int_ref &, unsigned char *, int); |
3902 | static unsigned insert_float (const_rtx, unsigned char *); |
3903 | static rtx rtl_for_decl_location (tree); |
3904 | static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool); |
3905 | static bool tree_add_const_value_attribute (dw_die_ref, tree); |
3906 | static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree); |
3907 | static void add_desc_attribute (dw_die_ref, tree); |
3908 | static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref); |
3909 | static void add_comp_dir_attribute (dw_die_ref); |
3910 | static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int, |
3911 | struct loc_descr_context *); |
3912 | static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree, |
3913 | struct loc_descr_context *); |
3914 | static void add_subscript_info (dw_die_ref, tree, bool); |
3915 | static void add_byte_size_attribute (dw_die_ref, tree); |
3916 | static void add_alignment_attribute (dw_die_ref, tree); |
3917 | static void add_bit_offset_attribute (dw_die_ref, tree); |
3918 | static void add_bit_size_attribute (dw_die_ref, tree); |
3919 | static void add_prototyped_attribute (dw_die_ref, tree); |
3920 | static void add_abstract_origin_attribute (dw_die_ref, tree); |
3921 | static void add_pure_or_virtual_attribute (dw_die_ref, tree); |
3922 | static void add_src_coords_attributes (dw_die_ref, tree); |
3923 | static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false); |
3924 | static void add_discr_value (dw_die_ref, dw_discr_value *); |
3925 | static void add_discr_list (dw_die_ref, dw_discr_list_ref); |
3926 | static inline dw_discr_list_ref AT_discr_list (dw_attr_node *); |
3927 | static dw_die_ref scope_die_for (tree, dw_die_ref); |
3928 | static inline bool local_scope_p (dw_die_ref); |
3929 | static inline bool class_scope_p (dw_die_ref); |
3930 | static inline bool class_or_namespace_scope_p (dw_die_ref); |
3931 | static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref); |
3932 | static void add_calling_convention_attribute (dw_die_ref, tree); |
3933 | static const char *type_tag (const_tree); |
3934 | static tree member_declared_type (const_tree); |
3935 | #if 0 |
3936 | static const char *decl_start_label (tree); |
3937 | #endif |
3938 | static void gen_array_type_die (tree, dw_die_ref); |
3939 | static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref); |
3940 | #if 0 |
3941 | static void gen_entry_point_die (tree, dw_die_ref); |
3942 | #endif |
3943 | static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref, bool); |
3944 | static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref); |
3945 | static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*); |
3946 | static void gen_unspecified_parameters_die (tree, dw_die_ref); |
3947 | static void gen_formal_types_die (tree, dw_die_ref); |
3948 | static void gen_subprogram_die (tree, dw_die_ref); |
3949 | static void gen_variable_die (tree, tree, dw_die_ref); |
3950 | static void gen_const_die (tree, dw_die_ref); |
3951 | static void gen_label_die (tree, dw_die_ref); |
3952 | static void gen_lexical_block_die (tree, dw_die_ref); |
3953 | static void gen_inlined_subroutine_die (tree, dw_die_ref); |
3954 | static void gen_field_die (tree, struct vlr_context *, dw_die_ref); |
3955 | static void gen_ptr_to_mbr_type_die (tree, dw_die_ref); |
3956 | static dw_die_ref gen_compile_unit_die (const char *); |
3957 | static void gen_inheritance_die (tree, tree, tree, dw_die_ref); |
3958 | static void gen_member_die (tree, dw_die_ref); |
3959 | static void gen_struct_or_union_type_die (tree, dw_die_ref, |
3960 | enum debug_info_usage); |
3961 | static void gen_subroutine_type_die (tree, dw_die_ref); |
3962 | static void gen_typedef_die (tree, dw_die_ref); |
3963 | static void gen_type_die (tree, dw_die_ref, bool = false); |
3964 | static void gen_block_die (tree, dw_die_ref); |
3965 | static void decls_for_scope (tree, dw_die_ref, bool = true); |
3966 | static bool is_naming_typedef_decl (const_tree); |
3967 | static inline dw_die_ref get_context_die (tree); |
3968 | static void gen_namespace_die (tree, dw_die_ref); |
3969 | static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree); |
3970 | static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref); |
3971 | static dw_die_ref force_decl_die (tree); |
3972 | static dw_die_ref force_type_die (tree); |
3973 | static dw_die_ref setup_namespace_context (tree, dw_die_ref); |
3974 | static dw_die_ref declare_in_namespace (tree, dw_die_ref); |
3975 | static struct dwarf_file_data * lookup_filename (const char *); |
3976 | static void retry_incomplete_types (void); |
3977 | static void gen_type_die_for_member (tree, tree, dw_die_ref); |
3978 | static void gen_generic_params_dies (tree); |
3979 | static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage, |
3980 | bool = false); |
3981 | static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage, |
3982 | bool = false); |
3983 | static void splice_child_die (dw_die_ref, dw_die_ref); |
3984 | static int file_info_cmp (const void *, const void *); |
3985 | static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view, |
3986 | const char *, var_loc_view, const char *); |
3987 | static void output_loc_list (dw_loc_list_ref); |
3988 | static char *gen_internal_sym (const char *); |
3989 | static bool want_pubnames (void); |
3990 | |
3991 | static void prune_unmark_dies (dw_die_ref); |
3992 | static void prune_unused_types_mark_generic_parms_dies (dw_die_ref); |
3993 | static void prune_unused_types_mark (dw_die_ref, int); |
3994 | static void prune_unused_types_walk (dw_die_ref); |
3995 | static void prune_unused_types_walk_attribs (dw_die_ref); |
3996 | static void prune_unused_types_prune (dw_die_ref); |
3997 | static void prune_unused_types (void); |
3998 | static int maybe_emit_file (struct dwarf_file_data *fd); |
3999 | static inline const char *AT_vms_delta1 (dw_attr_node *); |
4000 | static inline const char *AT_vms_delta2 (dw_attr_node *); |
4001 | #if VMS_DEBUGGING_INFO |
4002 | static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute, |
4003 | const char *, const char *); |
4004 | #endif |
4005 | static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree); |
4006 | static void gen_remaining_tmpl_value_param_die_attribute (void); |
4007 | static bool generic_type_p (tree); |
4008 | static void schedule_generic_params_dies_gen (tree t); |
4009 | static void gen_scheduled_generic_parms_dies (void); |
4010 | static void resolve_variable_values (void); |
4011 | |
4012 | static const char *comp_dir_string (void); |
4013 | |
4014 | static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &); |
4015 | |
4016 | /* enum for tracking thread-local variables whose address is really an offset |
4017 | relative to the TLS pointer, which will need link-time relocation, but will |
4018 | not need relocation by the DWARF consumer. */ |
4019 | |
enum dtprel_bool
{
  /* Ordinary address: needs both link-time and consumer relocation.  */
  dtprel_false = 0,
  /* Offset relative to the TLS pointer: link-time relocation only.  */
  dtprel_true = 1
};
4025 | |
4026 | /* Return the operator to use for an address of a variable. For dtprel_true, we |
4027 | use DW_OP_const*. For regular variables, which need both link-time |
4028 | relocation and consumer-level relocation (e.g., to account for shared objects |
4029 | loaded at a random address), we use DW_OP_addr*. */ |
4030 | |
4031 | static inline enum dwarf_location_atom |
4032 | dw_addr_op (enum dtprel_bool dtprel) |
4033 | { |
4034 | if (dtprel == dtprel_true) |
4035 | return (dwarf_split_debug_info ? dwarf_OP (op: DW_OP_constx) |
4036 | : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u)); |
4037 | else |
4038 | return dwarf_split_debug_info ? dwarf_OP (op: DW_OP_addrx) : DW_OP_addr; |
4039 | } |
4040 | |
4041 | /* Return a pointer to a newly allocated address location description. If |
4042 | dwarf_split_debug_info is true, then record the address with the appropriate |
4043 | relocation. */ |
4044 | static inline dw_loc_descr_ref |
4045 | new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel) |
4046 | { |
4047 | dw_loc_descr_ref ref = new_loc_descr (op: dw_addr_op (dtprel), oprnd1: 0, oprnd2: 0); |
4048 | |
4049 | ref->dw_loc_oprnd1.val_class = dw_val_class_addr; |
4050 | ref->dw_loc_oprnd1.v.val_addr = addr; |
4051 | ref->dtprel = dtprel; |
4052 | if (dwarf_split_debug_info) |
4053 | ref->dw_loc_oprnd1.val_entry |
4054 | = add_addr_table_entry (addr, |
4055 | dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx); |
4056 | else |
4057 | ref->dw_loc_oprnd1.val_entry = NULL; |
4058 | |
4059 | return ref; |
4060 | } |
4061 | |
4062 | /* Section names used to hold DWARF debugging information. */ |
4063 | |
4064 | #ifndef DEBUG_INFO_SECTION |
4065 | #define DEBUG_INFO_SECTION ".debug_info" |
4066 | #endif |
4067 | #ifndef DEBUG_DWO_INFO_SECTION |
4068 | #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo" |
4069 | #endif |
4070 | #ifndef DEBUG_LTO_INFO_SECTION |
4071 | #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info" |
4072 | #endif |
4073 | #ifndef DEBUG_LTO_DWO_INFO_SECTION |
4074 | #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo" |
4075 | #endif |
4076 | #ifndef DEBUG_ABBREV_SECTION |
4077 | #define DEBUG_ABBREV_SECTION ".debug_abbrev" |
4078 | #endif |
4079 | #ifndef DEBUG_LTO_ABBREV_SECTION |
4080 | #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev" |
4081 | #endif |
4082 | #ifndef DEBUG_DWO_ABBREV_SECTION |
4083 | #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo" |
4084 | #endif |
4085 | #ifndef DEBUG_LTO_DWO_ABBREV_SECTION |
4086 | #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo" |
4087 | #endif |
4088 | #ifndef DEBUG_ARANGES_SECTION |
4089 | #define DEBUG_ARANGES_SECTION ".debug_aranges" |
4090 | #endif |
4091 | #ifndef DEBUG_ADDR_SECTION |
4092 | #define DEBUG_ADDR_SECTION ".debug_addr" |
4093 | #endif |
4094 | #ifndef DEBUG_MACINFO_SECTION |
4095 | #define DEBUG_MACINFO_SECTION ".debug_macinfo" |
4096 | #endif |
4097 | #ifndef DEBUG_LTO_MACINFO_SECTION |
4098 | #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo" |
4099 | #endif |
4100 | #ifndef DEBUG_DWO_MACINFO_SECTION |
4101 | #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo" |
4102 | #endif |
4103 | #ifndef DEBUG_LTO_DWO_MACINFO_SECTION |
4104 | #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo" |
4105 | #endif |
4106 | #ifndef DEBUG_MACRO_SECTION |
4107 | #define DEBUG_MACRO_SECTION ".debug_macro" |
4108 | #endif |
4109 | #ifndef DEBUG_LTO_MACRO_SECTION |
4110 | #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro" |
4111 | #endif |
4112 | #ifndef DEBUG_DWO_MACRO_SECTION |
4113 | #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo" |
4114 | #endif |
4115 | #ifndef DEBUG_LTO_DWO_MACRO_SECTION |
4116 | #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo" |
4117 | #endif |
4118 | #ifndef DEBUG_LINE_SECTION |
4119 | #define DEBUG_LINE_SECTION ".debug_line" |
4120 | #endif |
4121 | #ifndef DEBUG_LTO_LINE_SECTION |
4122 | #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line" |
4123 | #endif |
4124 | #ifndef DEBUG_DWO_LINE_SECTION |
4125 | #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo" |
4126 | #endif |
4127 | #ifndef DEBUG_LTO_DWO_LINE_SECTION |
4128 | #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo" |
4129 | #endif |
4130 | #ifndef DEBUG_LOC_SECTION |
4131 | #define DEBUG_LOC_SECTION ".debug_loc" |
4132 | #endif |
4133 | #ifndef DEBUG_DWO_LOC_SECTION |
4134 | #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo" |
4135 | #endif |
4136 | #ifndef DEBUG_LOCLISTS_SECTION |
4137 | #define DEBUG_LOCLISTS_SECTION ".debug_loclists" |
4138 | #endif |
4139 | #ifndef DEBUG_DWO_LOCLISTS_SECTION |
4140 | #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo" |
4141 | #endif |
4142 | #ifndef DEBUG_PUBNAMES_SECTION |
4143 | #define DEBUG_PUBNAMES_SECTION \ |
4144 | ((debug_generate_pub_sections == 2) \ |
4145 | ? ".debug_gnu_pubnames" : ".debug_pubnames") |
4146 | #endif |
4147 | #ifndef DEBUG_PUBTYPES_SECTION |
4148 | #define DEBUG_PUBTYPES_SECTION \ |
4149 | ((debug_generate_pub_sections == 2) \ |
4150 | ? ".debug_gnu_pubtypes" : ".debug_pubtypes") |
4151 | #endif |
4152 | #ifndef DEBUG_STR_OFFSETS_SECTION |
4153 | #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets" |
4154 | #endif |
4155 | #ifndef DEBUG_DWO_STR_OFFSETS_SECTION |
4156 | #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo" |
4157 | #endif |
4158 | #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION |
4159 | #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo" |
4160 | #endif |
4161 | #ifndef DEBUG_STR_SECTION |
4162 | #define DEBUG_STR_SECTION ".debug_str" |
4163 | #endif |
4164 | #ifndef DEBUG_LTO_STR_SECTION |
4165 | #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str" |
4166 | #endif |
4167 | #ifndef DEBUG_STR_DWO_SECTION |
4168 | #define DEBUG_STR_DWO_SECTION ".debug_str.dwo" |
4169 | #endif |
4170 | #ifndef DEBUG_LTO_STR_DWO_SECTION |
4171 | #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo" |
4172 | #endif |
4173 | #ifndef DEBUG_RANGES_SECTION |
4174 | #define DEBUG_RANGES_SECTION ".debug_ranges" |
4175 | #endif |
4176 | #ifndef DEBUG_RNGLISTS_SECTION |
4177 | #define DEBUG_RNGLISTS_SECTION ".debug_rnglists" |
4178 | #endif |
4179 | #ifndef DEBUG_DWO_RNGLISTS_SECTION |
4180 | #define DEBUG_DWO_RNGLISTS_SECTION ".debug_rnglists.dwo" |
4181 | #endif |
4182 | #ifndef DEBUG_LINE_STR_SECTION |
4183 | #define DEBUG_LINE_STR_SECTION ".debug_line_str" |
4184 | #endif |
4185 | #ifndef DEBUG_LTO_LINE_STR_SECTION |
4186 | #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str" |
4187 | #endif |
4188 | |
4189 | /* Section flags for .debug_str section. */ |
4190 | #define DEBUG_STR_SECTION_FLAGS \ |
4191 | (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \ |
4192 | ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \ |
4193 | : SECTION_DEBUG) |
4194 | |
4195 | /* Section flags for .debug_str.dwo section. */ |
4196 | #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE) |
4197 | |
4198 | /* Attribute used to refer to the macro section. */ |
4199 | #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \ |
4200 | : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros) |
4201 | |
/* Labels we insert at the beginning of sections, so that we can refer
   to them instead of the section names themselves.  */
4204 | |
4205 | #ifndef TEXT_SECTION_LABEL |
4206 | #define TEXT_SECTION_LABEL "Ltext" |
4207 | #endif |
4208 | #ifndef COLD_TEXT_SECTION_LABEL |
4209 | #define COLD_TEXT_SECTION_LABEL "Ltext_cold" |
4210 | #endif |
4211 | #ifndef DEBUG_LINE_SECTION_LABEL |
4212 | #define DEBUG_LINE_SECTION_LABEL "Ldebug_line" |
4213 | #endif |
4214 | #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL |
4215 | #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line" |
4216 | #endif |
4217 | #ifndef DEBUG_INFO_SECTION_LABEL |
4218 | #define DEBUG_INFO_SECTION_LABEL "Ldebug_info" |
4219 | #endif |
4220 | #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL |
4221 | #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info" |
4222 | #endif |
4223 | #ifndef DEBUG_ABBREV_SECTION_LABEL |
4224 | #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev" |
4225 | #endif |
4226 | #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL |
4227 | #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev" |
4228 | #endif |
4229 | #ifndef DEBUG_ADDR_SECTION_LABEL |
4230 | #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr" |
4231 | #endif |
4232 | #ifndef DEBUG_LOC_SECTION_LABEL |
4233 | #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc" |
4234 | #endif |
4235 | #ifndef DEBUG_RANGES_SECTION_LABEL |
4236 | #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges" |
4237 | #endif |
4238 | #ifndef DEBUG_MACINFO_SECTION_LABEL |
4239 | #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo" |
4240 | #endif |
4241 | #ifndef DEBUG_MACRO_SECTION_LABEL |
4242 | #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro" |
4243 | #endif |
4244 | #define SKELETON_COMP_DIE_ABBREV 1 |
4245 | #define SKELETON_TYPE_DIE_ABBREV 2 |
4246 | |
/* Definitions of defaults for formats and names of various special
   (artificial) labels which may be generated within this file (when the -g
   option is used and DWARF2_DEBUGGING_INFO is in effect).
   If necessary, these may be overridden from within the tm.h file, but
   typically, overriding these defaults is unnecessary.  */
4252 | |
4253 | static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES]; |
4254 | static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES]; |
4255 | static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES]; |
4256 | static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES]; |
4257 | static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES]; |
4258 | static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES]; |
4259 | static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES]; |
4260 | static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES]; |
4261 | static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES]; |
4262 | static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES]; |
4263 | static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES]; |
4264 | static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES]; |
4265 | static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES]; |
4266 | static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES]; |
4267 | static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES]; |
4268 | |
4269 | #ifndef TEXT_END_LABEL |
4270 | #define TEXT_END_LABEL "Letext" |
4271 | #endif |
4272 | #ifndef COLD_END_LABEL |
4273 | #define COLD_END_LABEL "Letext_cold" |
4274 | #endif |
4275 | #ifndef BLOCK_BEGIN_LABEL |
4276 | #define BLOCK_BEGIN_LABEL "LBB" |
4277 | #endif |
4278 | #ifndef BLOCK_INLINE_ENTRY_LABEL |
4279 | #define BLOCK_INLINE_ENTRY_LABEL "LBI" |
4280 | #endif |
4281 | #ifndef BLOCK_END_LABEL |
4282 | #define BLOCK_END_LABEL "LBE" |
4283 | #endif |
4284 | #ifndef LINE_CODE_LABEL |
4285 | #define LINE_CODE_LABEL "LM" |
4286 | #endif |
4287 | |
4288 | |
4289 | /* Return the root of the DIE's built for the current compilation unit. */ |
4290 | static dw_die_ref |
4291 | comp_unit_die (void) |
4292 | { |
4293 | if (!single_comp_unit_die) |
4294 | single_comp_unit_die = gen_compile_unit_die (NULL); |
4295 | return single_comp_unit_die; |
4296 | } |
4297 | |
/* We allow a language front-end to designate a function that is to be
   called to "demangle" any name before it is put into a DIE.  */

/* The registered demangling hook, or NULL if names are used as-is.  */
static const char *(*demangle_name_func) (const char *);

/* Register FUNC as the front end's name-demangling hook.  */
void
dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
{
  demangle_name_func = func;
}
4308 | |
4309 | /* Test if rtl node points to a pseudo register. */ |
4310 | |
4311 | static inline bool |
4312 | is_pseudo_reg (const_rtx rtl) |
4313 | { |
4314 | return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER) |
4315 | || (GET_CODE (rtl) == SUBREG |
4316 | && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER)); |
4317 | } |
4318 | |
4319 | /* Return a reference to a type, with its const and volatile qualifiers |
4320 | removed. */ |
4321 | |
4322 | static inline tree |
4323 | type_main_variant (tree type) |
4324 | { |
4325 | type = TYPE_MAIN_VARIANT (type); |
4326 | |
4327 | /* ??? There really should be only one main variant among any group of |
4328 | variants of a given type (and all of the MAIN_VARIANT values for all |
4329 | members of the group should point to that one type) but sometimes the C |
4330 | front-end messes this up for array types, so we work around that bug |
4331 | here. */ |
4332 | if (TREE_CODE (type) == ARRAY_TYPE) |
4333 | while (type != TYPE_MAIN_VARIANT (type)) |
4334 | type = TYPE_MAIN_VARIANT (type); |
4335 | |
4336 | return type; |
4337 | } |
4338 | |
4339 | /* Return true if the given type node represents a tagged type. */ |
4340 | |
4341 | static inline bool |
4342 | is_tagged_type (const_tree type) |
4343 | { |
4344 | enum tree_code code = TREE_CODE (type); |
4345 | |
4346 | return (code == RECORD_TYPE || code == UNION_TYPE |
4347 | || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE); |
4348 | } |
4349 | |
4350 | /* Set label to debug_info_section_label + die_offset of a DIE reference. */ |
4351 | |
4352 | static void |
4353 | get_ref_die_offset_label (char *label, dw_die_ref ref) |
4354 | { |
4355 | sprintf (s: label, format: "%s+%ld" , debug_info_section_label, ref->die_offset); |
4356 | } |
4357 | |
/* Return die_offset of a DIE reference to a base type.  Unlike other DIEs,
   base type DIE offsets may be computed lazily here on first request.  */

static unsigned long int
get_base_type_offset (dw_die_ref ref)
{
  if (ref->die_offset)
    return ref->die_offset;
  /* Offsets of base type DIEs are not assigned up front; once the comp
     unit DIE has its abbrev assigned we can (and must) size the base type
     DIEs now so that REF receives a valid, nonzero offset.  */
  if (comp_unit_die ()->die_abbrev)
    {
      calc_base_type_die_sizes ();
      gcc_assert (ref->die_offset);
    }
  return ref->die_offset;
}
4372 | |
/* Return die_offset of a DIE reference other than base type.  The offset
   must already have been assigned (i.e. be nonzero); there is no lazy
   computation here, unlike get_base_type_offset.  */

static unsigned long int
get_ref_die_offset (dw_die_ref ref)
{
  gcc_assert (ref->die_offset);
  return ref->die_offset;
}
4381 | |
4382 | /* Convert a DIE tag into its string name. */ |
4383 | |
4384 | static const char * |
4385 | dwarf_tag_name (unsigned int tag) |
4386 | { |
4387 | const char *name = get_DW_TAG_name (tag); |
4388 | |
4389 | if (name != NULL) |
4390 | return name; |
4391 | |
4392 | return "DW_TAG_<unknown>" ; |
4393 | } |
4394 | |
/* Convert a DWARF attribute code into its string name.  */

static const char *
dwarf_attr_name (unsigned int attr)
{
  const char *name;

  /* Handle a few vendor-extension attributes specially: the spelling
     depends on whether VMS-style or MIPS-style debugging info is being
     generated (presumably because the vendor codes overlap numerically —
     the #if would be unnecessary otherwise; TODO confirm).  */
  switch (attr)
    {
#if VMS_DEBUGGING_INFO
    case DW_AT_HP_prologue:
      return "DW_AT_HP_prologue" ;
#else
    case DW_AT_MIPS_loop_unroll_factor:
      return "DW_AT_MIPS_loop_unroll_factor" ;
#endif

#if VMS_DEBUGGING_INFO
    case DW_AT_HP_epilogue:
      return "DW_AT_HP_epilogue" ;
#else
    case DW_AT_MIPS_stride:
      return "DW_AT_MIPS_stride" ;
#endif
    }

  /* All other attributes come from the generic name table.  */
  name = get_DW_AT_name (attr);

  if (name != NULL)
    return name;

  return "DW_AT_<unknown>" ;
}
4428 | |
4429 | /* Convert a DWARF value form code into its string name. */ |
4430 | |
4431 | static const char * |
4432 | dwarf_form_name (unsigned int form) |
4433 | { |
4434 | const char *name = get_DW_FORM_name (form); |
4435 | |
4436 | if (name != NULL) |
4437 | return name; |
4438 | |
4439 | return "DW_FORM_<unknown>" ; |
4440 | } |
4441 | |
/* Determine the "ultimate origin" of a decl.  The decl may be an inlined
   instance of an inlined instance of a decl which is local to an inline
   function, so we have to trace all of the way back through the origin chain
   to find out what sort of node actually served as the original seed for the
   given block.  Returns NULL_TREE when DECL has no usable origin.  */

static tree
decl_ultimate_origin (const_tree decl)
{
  /* Only nodes carrying DECL_COMMON data have an abstract origin field
     at all.  */
  if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
    return NULL_TREE;

  /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
     we're trying to output the abstract instance of this function.  */
  if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
    return NULL_TREE;

  /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
     most distant ancestor, this should never happen.  */
  gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));

  return DECL_ABSTRACT_ORIGIN (decl);
}
4465 | |
/* Get the class to which DECL belongs, if any.  In g++, the DECL_CONTEXT
   of a virtual function may refer to a base class, so we check the 'this'
   parameter.  Returns NULL_TREE when the context is not a type.  */

static tree
decl_class_context (tree decl)
{
  tree context = NULL_TREE;

  if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
    context = DECL_CONTEXT (decl);
  else
    /* Virtual member function: derive the class from the type of the
       first argument, presumably the implicit 'this' parameter.  */
    context = TYPE_MAIN_VARIANT
      (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));

  /* A non-type context (e.g. a namespace or function) is not a class.  */
  if (context && !TYPE_P (context))
    context = NULL_TREE;

  return context;
}
4486 | |
/* Add an attribute/value pair to a DIE.  ATTR is copied by value into
   DIE's attribute vector, so the caller may reuse its dw_attr_node.
   Silently ignores a NULL DIE.  */

static inline void
add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
{
  /* Maybe this should be an assert?  */
  if (die == NULL)
    return;

  if (flag_checking)
    {
      /* Check we do not add duplicate attrs.  Can't use get_AT here
	 because that recurses to the specification/abstract origin DIE.  */
      dw_attr_node *a;
      unsigned ix;
      FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
	gcc_assert (a->dw_attr != attr->dw_attr);
    }

  vec_safe_reserve (v&: die->die_attr, nelems: 1);
  vec_safe_push (v&: die->die_attr, obj: *attr);
}
4509 | |
/* Return the value class of attribute A.  */

enum dw_val_class
AT_class (dw_attr_node *a)
{
  return a->dw_attr_val.val_class;
}
4515 | |
4516 | /* Return the index for any attribute that will be referenced with a |
4517 | DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String |
4518 | indices are stored in dw_attr_val.v.val_str for reference counting |
4519 | pruning. */ |
4520 | |
4521 | static inline unsigned int |
4522 | AT_index (dw_attr_node *a) |
4523 | { |
4524 | if (AT_class (a) == dw_val_class_str) |
4525 | return a->dw_attr_val.v.val_str->index; |
4526 | else if (a->dw_attr_val.val_entry != NULL) |
4527 | return a->dw_attr_val.val_entry->index; |
4528 | return NOT_INDEXED; |
4529 | } |
4530 | |
4531 | /* Add a flag value attribute to a DIE. */ |
4532 | |
4533 | static inline void |
4534 | add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag) |
4535 | { |
4536 | dw_attr_node attr; |
4537 | |
4538 | attr.dw_attr = attr_kind; |
4539 | attr.dw_attr_val.val_class = dw_val_class_flag; |
4540 | attr.dw_attr_val.val_entry = NULL; |
4541 | attr.dw_attr_val.v.val_flag = flag; |
4542 | add_dwarf_attr (die, attr: &attr); |
4543 | } |
4544 | |
/* Return the flag value of attribute A, which must be of class
   dw_val_class_flag.  */

static inline unsigned
AT_flag (dw_attr_node *a)
{
  gcc_assert (a && AT_class (a) == dw_val_class_flag);
  return a->dw_attr_val.v.val_flag;
}
4551 | |
/* Add a signed integer attribute value (class dw_val_class_const)
   to a DIE.  */

static inline void
add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
{
  dw_attr_node attr;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_const;
  attr.dw_attr_val.val_entry = NULL;
  attr.dw_attr_val.v.val_int = int_val;
  add_dwarf_attr (die, attr: &attr);
}
4565 | |
/* Return the signed integer value of attribute A, which must be of class
   dw_val_class_const or dw_val_class_const_implicit.  */

HOST_WIDE_INT
AT_int (dw_attr_node *a)
{
  gcc_assert (a && (AT_class (a) == dw_val_class_const
		    || AT_class (a) == dw_val_class_const_implicit));
  return a->dw_attr_val.v.val_int;
}
4573 | |
/* Add an unsigned integer attribute value (class
   dw_val_class_unsigned_const) to a DIE.  */

static inline void
add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
		 unsigned HOST_WIDE_INT unsigned_val)
{
  dw_attr_node attr;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
  attr.dw_attr_val.val_entry = NULL;
  attr.dw_attr_val.v.val_unsigned = unsigned_val;
  add_dwarf_attr (die, attr: &attr);
}
4588 | |
/* Return the unsigned integer value of attribute A, which must be of class
   dw_val_class_unsigned_const or dw_val_class_unsigned_const_implicit.  */

unsigned HOST_WIDE_INT
AT_unsigned (dw_attr_node *a)
{
  gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
		    || AT_class (a) == dw_val_class_unsigned_const_implicit));
  return a->dw_attr_val.v.val_unsigned;
}
4596 | |
/* Allocate a GC'ed dw_wide_int holding a copy of W's value.  */

dw_wide_int *
alloc_dw_wide_int (const wide_int_ref &w)
{
  /* dw_wide_int carries its elements in a trailing array; allocate room
     for len elements in one chunk (sizeof (dw_wide_int) presumably already
     accounts for one element of val — TODO confirm against the struct).  */
  dw_wide_int *p
    = (dw_wide_int *) ggc_internal_alloc (s: sizeof (dw_wide_int)
					  + ((w.get_len () - 1)
					     * sizeof (HOST_WIDE_INT)));
  p->precision = w.get_precision ();
  p->len = w.get_len ();
  memcpy (dest: p->val, src: w.get_val (), n: p->len * sizeof (HOST_WIDE_INT));
  return p;
}
4609 | |
/* Add an unsigned wide integer attribute value (class dw_val_class_wide_int)
   to a DIE.  W's value is copied into GC-managed storage.  */

static inline void
add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
	     const wide_int_ref &w)
{
  dw_attr_node attr;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_wide_int;
  attr.dw_attr_val.val_entry = NULL;
  attr.dw_attr_val.v.val_wide = alloc_dw_wide_int (w);
  add_dwarf_attr (die, attr: &attr);
}
4624 | |
/* Add an unsigned double (two-HOST_WIDE_INT) integer attribute value
   to a DIE.  HIGH holds the most significant half, LOW the least.  */

static inline void
add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
	       HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
{
  dw_attr_node attr;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_const_double;
  attr.dw_attr_val.val_entry = NULL;
  attr.dw_attr_val.v.val_double.high = high;
  attr.dw_attr_val.v.val_double.low = low;
  add_dwarf_attr (die, attr: &attr);
}
4640 | |
/* Add a byte-vector attribute value (class dw_val_class_vec) to a DIE:
   LENGTH elements of ELT_SIZE bytes each, stored in ARRAY.  (Despite the
   historical comment calling this "floating point", it stores an opaque
   byte array; ARRAY's ownership transfers to the attribute — it is not
   copied here.)  */

static inline void
add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
	    unsigned int length, unsigned int elt_size, unsigned char *array)
{
  dw_attr_node attr;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_vec;
  attr.dw_attr_val.val_entry = NULL;
  attr.dw_attr_val.v.val_vec.length = length;
  attr.dw_attr_val.v.val_vec.elt_size = elt_size;
  attr.dw_attr_val.v.val_vec.array = array;
  add_dwarf_attr (die, attr: &attr);
}
4657 | |
/* Add an 8-byte data attribute value (class dw_val_class_data8) to a DIE.
   DATA8 is copied into the attribute, so the caller's buffer may be
   reused.  */

static inline void
add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
	      unsigned char data8[8])
{
  dw_attr_node attr;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_data8;
  attr.dw_attr_val.val_entry = NULL;
  memcpy (dest: attr.dw_attr_val.v.val_data8, src: data8, n: 8);
  add_dwarf_attr (die, attr: &attr);
}
4672 | |
/* Add DW_AT_low_pc and DW_AT_high_pc to a DIE.  When using
   dwarf_split_debug_info, address attributes in dies destined for the
   final executable have force_direct set to avoid using indexed
   references.  LBL_LOW and LBL_HIGH are the assembler labels bounding
   the range; both are duplicated, so the caller retains ownership.  */

static inline void
add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
		    bool force_direct)
{
  dw_attr_node attr;
  char * lbl_id;

  /* DW_AT_low_pc is always a label; in split-DWARF mode it normally goes
     through the address table unless the caller forced a direct ref.  */
  lbl_id = xstrdup (lbl_low);
  attr.dw_attr = DW_AT_low_pc;
  attr.dw_attr_val.val_class = dw_val_class_lbl_id;
  attr.dw_attr_val.v.val_lbl_id = lbl_id;
  if (dwarf_split_debug_info && !force_direct)
    attr.dw_attr_val.val_entry
      = add_addr_table_entry (lbl_id, ate_kind_label);
  else
    attr.dw_attr_val.val_entry = NULL;
  add_dwarf_attr (die, attr: &attr);

  /* ATTR is reused for DW_AT_high_pc.  For DWARF 4+ it gets the special
     dw_val_class_high_pc class instead of a plain label.  */
  attr.dw_attr = DW_AT_high_pc;
  if (dwarf_version < 4)
    attr.dw_attr_val.val_class = dw_val_class_lbl_id;
  else
    attr.dw_attr_val.val_class = dw_val_class_high_pc;
  lbl_id = xstrdup (lbl_high);
  attr.dw_attr_val.v.val_lbl_id = lbl_id;
  /* Only plain-label high_pc values are eligible for the address table.  */
  if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
      && dwarf_split_debug_info && !force_direct)
    attr.dw_attr_val.val_entry
      = add_addr_table_entry (lbl_id, ate_kind_label);
  else
    attr.dw_attr_val.val_entry = NULL;
  add_dwarf_attr (die, attr: &attr);
}
4711 | |
4712 | /* Hash and equality functions for debug_str_hash. */ |
4713 | |
/* Hash an indirect_string_node by its string contents.  */
hashval_t
indirect_string_hasher::hash (indirect_string_node *x)
{
  return htab_hash_string (x->str);
}
4719 | |
/* Compare an indirect_string_node against a plain string key.  */
bool
indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
{
  return strcmp (s1: x1->str, s2: x2) == 0;
}
4725 | |
/* Add STR to the given string hash table, creating a node (with a GC'ed
   copy of STR) on first sight when INSERT allows it.  Bumps the node's
   refcount on every call, so each lookup counts as a reference.  */

static struct indirect_string_node *
find_AT_string_in_table (const char *str,
			 hash_table<indirect_string_hasher> *table,
			 enum insert_option insert = INSERT)
{
  struct indirect_string_node *node;

  indirect_string_node **slot
    = table->find_slot_with_hash (comparable: str, hash: htab_hash_string (str), insert);
  if (*slot == NULL)
    {
      node = ggc_cleared_alloc<indirect_string_node> ();
      node->str = ggc_strdup (str);
      *slot = node;
    }
  else
    node = *slot;

  node->refcount++;
  return node;
}
4749 | |
/* Add STR to the indirect string hash table (debug_str_hash), creating
   the table lazily on first use.  */

static struct indirect_string_node *
find_AT_string (const char *str, enum insert_option insert = INSERT)
{
  if (! debug_str_hash)
    debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (n: 10);

  return find_AT_string_in_table (str, table: debug_str_hash, insert);
}
4760 | |
/* Add a string attribute value (class dw_val_class_str) to a DIE.
   STR is interned in debug_str_hash; whether it is later emitted inline
   or via .debug_str is decided by AT_string_form.  */

static inline void
add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
{
  dw_attr_node attr;
  struct indirect_string_node *node;

  node = find_AT_string (str);

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_str;
  attr.dw_attr_val.val_entry = NULL;
  attr.dw_attr_val.v.val_str = node;
  add_dwarf_attr (die, attr: &attr);
}
4777 | |
/* Return the string value of attribute A, which must be of class
   dw_val_class_str.  */

static inline const char *
AT_string (dw_attr_node *a)
{
  gcc_assert (a && AT_class (a) == dw_val_class_str);
  return a->dw_attr_val.v.val_str->str;
}
4784 | |
/* Call this function directly to bypass AT_string_form's logic to put
   the string inline in the die.  Assigns NODE a fresh "LASF" label and
   an out-of-line form: DW_FORM_strp normally, or an indexed strx form
   under split debug info.  */

static void
set_indirect_string (struct indirect_string_node *node)
{
  char label[MAX_ARTIFICIAL_LABEL_BYTES];
  /* Already indirect is a no op.  */
  if (node->form == DW_FORM_strp
      || node->form == DW_FORM_line_strp
      || node->form == dwarf_FORM (form: DW_FORM_strx))
    {
      gcc_assert (node->label);
      return;
    }
  ASM_GENERATE_INTERNAL_LABEL (label, "LASF" , dw2_string_counter);
  ++dw2_string_counter;
  node->label = xstrdup (label);

  if (!dwarf_split_debug_info)
    {
      node->form = DW_FORM_strp;
      node->index = NOT_INDEXED;
    }
  else
    {
      /* Split DWARF refers to strings through the string offsets table.  */
      node->form = dwarf_FORM (form: DW_FORM_strx);
      node->index = NO_INDEX_ASSIGNED;
    }
}
4815 | |
/* A helper function for dwarf2out_finish, called to reset indirect
   string decisions done for early LTO dwarf output before fat object
   dwarf output.  Traversal callback: clears the label, form, and index
   of any node previously made indirect; always returns 1 to continue
   the traversal.  */

int
reset_indirect_string (indirect_string_node **h, void *)
{
  struct indirect_string_node *node = *h;
  if (node->form == DW_FORM_strp
      || node->form == DW_FORM_line_strp
      || node->form == dwarf_FORM (form: DW_FORM_strx))
    {
      free (ptr: node->label);
      node->label = NULL;
      node->form = (dwarf_form) 0;
      node->index = 0;
    }
  return 1;
}
4835 | |
/* Add a string representing a file or filepath attribute value to a DIE.
   When the assembler supports .debug_line_str, such strings are interned
   in debug_line_str_hash and forced to DW_FORM_line_strp; otherwise this
   degrades to a plain add_AT_string.  */

static inline void
add_filepath_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
			const char *str)
{
  if (! asm_outputs_debug_line_str ())
    add_AT_string (die, attr_kind, str);
  else
    {
      dw_attr_node attr;
      struct indirect_string_node *node;

      if (!debug_line_str_hash)
	debug_line_str_hash
	  = hash_table<indirect_string_hasher>::create_ggc (n: 10);

      node = find_AT_string_in_table (str, table: debug_line_str_hash);
      /* set_indirect_string assigns the label and a strp/strx form;
	 override the form so the string lands in .debug_line_str.  */
      set_indirect_string (node);
      node->form = DW_FORM_line_strp;

      attr.dw_attr = attr_kind;
      attr.dw_attr_val.val_class = dw_val_class_str;
      attr.dw_attr_val.val_entry = NULL;
      attr.dw_attr_val.v.val_str = node;
      add_dwarf_attr (die, attr: &attr);
    }
}
4864 | |
/* Find out whether a string should be output inline in DIE
   or out-of-line in .debug_str section.  Caches the decision in
   NODE->form and returns it.  */

static enum dwarf_form
find_string_form (struct indirect_string_node *node)
{
  unsigned int len;

  /* A previous decision (possibly forced via set_indirect_string)
     stands.  */
  if (node->form)
    return node->form;

  len = strlen (s: node->str) + 1;

  /* If the string is shorter or equal to the size of the reference, it is
     always better to put it inline.  */
  if (len <= (unsigned) dwarf_offset_size || node->refcount == 0)
    return node->form = DW_FORM_string;

  /* If we cannot expect the linker to merge strings in .debug_str
     section, only put it into .debug_str if it is worth even in this
     single module: the per-reference saving (len minus one offset)
     times the reference count must exceed the cost of storing the
     string once.  */
  if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
      || ((debug_str_section->common.flags & SECTION_MERGE) == 0
	  && (len - dwarf_offset_size) * node->refcount <= len))
    return node->form = DW_FORM_string;

  set_indirect_string (node);

  return node->form;
}
4895 | |
/* Find out whether the string referenced from the attribute should be
   output inline in DIE or out-of-line in .debug_str section.  A must be
   of class dw_val_class_str.  */

static enum dwarf_form
AT_string_form (dw_attr_node *a)
{
  gcc_assert (a && AT_class (a) == dw_val_class_str);
  return find_string_form (node: a->dw_attr_val.v.val_str);
}
4905 | |
/* Add a DIE reference attribute value to a DIE: ATTR_KIND on DIE,
   pointing at TARG_DIE.  */

static inline void
add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
{
  dw_attr_node attr;
  gcc_checking_assert (targ_die != NULL);
  /* A DIE may reference itself, except through DW_AT_abstract_origin or
     DW_AT_specification.  */
  gcc_assert (targ_die != die
	      || (attr_kind != DW_AT_abstract_origin
		  && attr_kind != DW_AT_specification));

  /* With LTO we can end up trying to reference something we didn't create
     a DIE for.  Avoid crashing later on a NULL referenced DIE.
     NOTE(review): with checking enabled the gcc_checking_assert above
     aborts before this guard is reached, so the guard is effectively the
     release-build safety net only — confirm this is intentional.  */
  if (targ_die == NULL)
    return;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_die_ref;
  attr.dw_attr_val.val_entry = NULL;
  attr.dw_attr_val.v.val_die_ref.die = targ_die;
  attr.dw_attr_val.v.val_die_ref.external = 0;
  add_dwarf_attr (die, attr: &attr);
}
4929 | |
4930 | /* Change DIE reference REF to point to NEW_DIE instead. */ |
4931 | |
4932 | static inline void |
4933 | change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die) |
4934 | { |
4935 | gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref); |
4936 | ref->dw_attr_val.v.val_die_ref.die = new_die; |
4937 | ref->dw_attr_val.v.val_die_ref.external = 0; |
4938 | } |
4939 | |
/* Add an AT_specification attribute to a DIE, and also make the back
   pointer from the specification to the definition.  TARG_DIE must not
   already have a definition recorded.  */

static inline void
add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
{
  add_AT_die_ref (die, attr_kind: DW_AT_specification, targ_die);
  gcc_assert (!targ_die->die_definition);
  targ_die->die_definition = die;
}
4950 | |
/* Return the DIE referenced by attribute A, which must be of class
   dw_val_class_die_ref.  */

static inline dw_die_ref
AT_ref (dw_attr_node *a)
{
  gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
  return a->dw_attr_val.v.val_die_ref.die;
}
4957 | |
4958 | static inline int |
4959 | AT_ref_external (dw_attr_node *a) |
4960 | { |
4961 | if (a && AT_class (a) == dw_val_class_die_ref) |
4962 | return a->dw_attr_val.v.val_die_ref.external; |
4963 | |
4964 | return 0; |
4965 | } |
4966 | |
/* Set the "external" flag of DIE-reference attribute A to I.  A must be
   of class dw_val_class_die_ref.  */

static inline void
set_AT_ref_external (dw_attr_node *a, int i)
{
  gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
  a->dw_attr_val.v.val_die_ref.external = i;
}
4973 | |
/* Add a location description attribute value (class dw_val_class_loc)
   to a DIE.  */

static inline void
add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
{
  dw_attr_node attr;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_loc;
  attr.dw_attr_val.val_entry = NULL;
  attr.dw_attr_val.v.val_loc = loc;
  add_dwarf_attr (die, attr: &attr);
}
4987 | |
/* Return the location description of attribute A, which must be of class
   dw_val_class_loc.  */

dw_loc_descr_ref
AT_loc (dw_attr_node *a)
{
  gcc_assert (a && AT_class (a) == dw_val_class_loc);
  return a->dw_attr_val.v.val_loc;
}
4994 | |
/* Add a location list attribute value (class dw_val_class_loc_list) to a
   DIE, and record that location lists are in use.  Dropped silently on
   XCOFF targets lacking the required DWARF extras.  */

static inline void
add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
{
  if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
    return;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_loc_list;
  attr.dw_attr_val.val_entry = NULL;
  attr.dw_attr_val.v.val_loc_list = loc_list;
  add_dwarf_attr (die, attr: &attr);
  have_location_lists = true;
}
5010 | |
/* Return the location list of attribute A, which must be of class
   dw_val_class_loc_list.  */

static inline dw_loc_list_ref
AT_loc_list (dw_attr_node *a)
{
  gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
  return a->dw_attr_val.v.val_loc_list;
}
5017 | |
/* Add a view list attribute to DIE.  It must have a DW_AT_location
   attribute, because the view list complements the location list.
   Dropped silently on XCOFF targets lacking the required DWARF extras.  */

static inline void
add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
{
  dw_attr_node attr;

  if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
    return;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_view_list;
  attr.dw_attr_val.val_entry = NULL;
  /* The view list value points back at DIE itself; the actual list is
     found through DIE's DW_AT_location (see AT_loc_list_ptr).  */
  attr.dw_attr_val.v.val_view_list = die;
  add_dwarf_attr (die, attr: &attr);
  gcc_checking_assert (get_AT (die, DW_AT_location));
  gcc_assert (have_location_lists);
}
5037 | |
5038 | /* Return a pointer to the location list referenced by the attribute. |
5039 | If the named attribute is a view list, look up the corresponding |
5040 | DW_AT_location attribute and return its location list. */ |
5041 | |
static inline dw_loc_list_ref *
AT_loc_list_ptr (dw_attr_node *a)
{
  gcc_assert (a);
  switch (AT_class (a))
    {
    case dw_val_class_loc_list:
      return &a->dw_attr_val.v.val_loc_list;
    case dw_val_class_view_list:
      {
	dw_attr_node *l;
	/* A view list value stores the DIE; the list itself lives in
	   that DIE's DW_AT_location attribute.  */
	l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
	if (!l)
	  return NULL;
	/* Invariant: the view list attribute immediately follows the
	   location attribute in the DIE's attribute vector.  */
	gcc_checking_assert (l + 1 == a);
	return AT_loc_list_ptr (a: l);
      }
    default:
      gcc_unreachable ();
    }
}
5063 | |
5064 | /* Return the location attribute value associated with a view list |
5065 | attribute value. */ |
5066 | |
static inline dw_val_node *
view_list_to_loc_list_val_node (dw_val_node *val)
{
  gcc_assert (val->val_class == dw_val_class_view_list);
  /* The view list value records the DIE; fetch that DIE's location
     attribute, whose list the views annotate.  */
  dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
  if (!loc)
    return NULL;
  /* Invariant: the view list attribute directly follows the location
     attribute (same layout assumption as AT_loc_list_ptr).  */
  gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
  gcc_assert (AT_class (loc) == dw_val_class_loc_list);
  return &loc->dw_attr_val;
}
5078 | |
/* Hash traits for addr_table_entry, used by the .debug_addr index
   table below; hash/equal are defined further down.  */

struct addr_hasher : ggc_ptr_hash<addr_table_entry>
{
  static hashval_t hash (addr_table_entry *);
  static bool equal (addr_table_entry *, addr_table_entry *);
};

/* Table of entries into the .debug_addr section.  Created lazily by
   add_addr_table_entry and only used with dwarf_split_debug_info.  */

static GTY (()) hash_table<addr_hasher> *addr_index_table;
5088 | |
5089 | /* Hash an address_table_entry. */ |
5090 | |
hashval_t
addr_hasher::hash (addr_table_entry *a)
{
  inchash::hash hstate;
  switch (a->kind)
    {
    case ate_kind_rtx:
      /* Seed with the kind so an rtx entry and an rtx_dtprel entry
	 wrapping the same rtx hash differently.  */
      hstate.add_int (v: 0);
      break;
    case ate_kind_rtx_dtprel:
      hstate.add_int (v: 1);
      break;
    case ate_kind_label:
      /* Label entries are plain strings; no rtx to mix in.  */
      return htab_hash_string (a->addr.label);
    default:
      gcc_unreachable ();
    }
  inchash::add_rtx (a->addr.rtl, hstate);
  return hstate.end ();
}
5111 | |
5112 | /* Determine equality for two address_table_entries. */ |
5113 | |
bool
addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
{
  /* Entries of different kinds are never equal, even if their union
     payloads happen to coincide.  */
  if (a1->kind != a2->kind)
    return false;
  switch (a1->kind)
    {
    case ate_kind_rtx:
    case ate_kind_rtx_dtprel:
      return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
    case ate_kind_label:
      return strcmp (s1: a1->addr.label, s2: a2->addr.label) == 0;
    default:
      gcc_unreachable ();
    }
}
5130 | |
5131 | /* Initialize an addr_table_entry. */ |
5132 | |
5133 | void |
5134 | init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr) |
5135 | { |
5136 | e->kind = kind; |
5137 | switch (kind) |
5138 | { |
5139 | case ate_kind_rtx: |
5140 | case ate_kind_rtx_dtprel: |
5141 | e->addr.rtl = (rtx) addr; |
5142 | break; |
5143 | case ate_kind_label: |
5144 | e->addr.label = (char *) addr; |
5145 | break; |
5146 | } |
5147 | e->refcount = 0; |
5148 | e->index = NO_INDEX_ASSIGNED; |
5149 | } |
5150 | |
/* Add an entry for ADDR, of the given KIND, to the address table,
   creating the table if necessary.  Defer setting an index until
   output time.  */
5153 | |
static addr_table_entry *
add_addr_table_entry (void *addr, enum ate_kind kind)
{
  addr_table_entry *node;
  addr_table_entry finder;

  /* The .debug_addr table only exists for split debug info.  */
  gcc_assert (dwarf_split_debug_info);
  /* Create the table lazily on first use.  */
  if (! addr_index_table)
    addr_index_table = hash_table<addr_hasher>::create_ggc (n: 10);
  /* Probe with a stack-allocated key; only allocate a GC'd node when
     the entry is genuinely new.  */
  init_addr_table_entry (e: &finder, kind, addr);
  addr_table_entry **slot = addr_index_table->find_slot (value: &finder, insert: INSERT);

  if (*slot == HTAB_EMPTY_ENTRY)
    {
      node = ggc_cleared_alloc<addr_table_entry> ();
      init_addr_table_entry (e: node, kind, addr);
      *slot = node;
    }
  else
    node = *slot;

  /* Each caller holds one reference; released via
     remove_addr_table_entry.  */
  node->refcount++;
  return node;
}
5178 | |
5179 | /* Remove an entry from the addr table by decrementing its refcount. |
5180 | Strictly, decrementing the refcount would be enough, but the |
5181 | assertion that the entry is actually in the table has found |
5182 | bugs. */ |
5183 | |
static void
remove_addr_table_entry (addr_table_entry *entry)
{
  gcc_assert (dwarf_split_debug_info && addr_index_table);
  /* After an index is assigned, the table is frozen.  */
  gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
  /* The node stays in the hash table; index_addr_table_entry skips
     zero-refcount entries at output time.  */
  entry->refcount--;
}
5192 | |
5193 | /* Given a location list, remove all addresses it refers to from the |
5194 | address_table. */ |
5195 | |
static void
remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
{
  /* Walk the location expression chain and drop the reference held by
     each operand that points into the address table.  */
  for (; descr; descr = descr->dw_loc_next)
    if (descr->dw_loc_oprnd1.val_entry != NULL)
      {
	/* Entries may only be released while the table is unfrozen.  */
	gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
	remove_addr_table_entry (entry: descr->dw_loc_oprnd1.val_entry);
      }
}
5206 | |
5207 | /* A helper function for dwarf2out_finish called through |
5208 | htab_traverse. Assign an addr_table_entry its index. All entries |
5209 | must be collected into the table when this function is called, |
5210 | because the indexing code relies on htab_traverse to traverse nodes |
5211 | in the same order for each run. */ |
5212 | |
5213 | int |
5214 | index_addr_table_entry (addr_table_entry **h, unsigned int *index) |
5215 | { |
5216 | addr_table_entry *node = *h; |
5217 | |
5218 | /* Don't index unreferenced nodes. */ |
5219 | if (node->refcount == 0) |
5220 | return 1; |
5221 | |
5222 | gcc_assert (node->index == NO_INDEX_ASSIGNED); |
5223 | node->index = *index; |
5224 | *index += 1; |
5225 | |
5226 | return 1; |
5227 | } |
5228 | |
5229 | /* Return the tag of a given DIE. */ |
5230 | |
enum dwarf_tag
dw_get_die_tag (dw_die_ref die)
{
  /* Plain accessor; DIE is dereferenced unconditionally.  */
  return die->die_tag;
}
5236 | |
5237 | /* Return a reference to the children list of a given DIE. */ |
5238 | |
dw_die_ref
dw_get_die_child (dw_die_ref die)
{
  /* Note: die_child points at the LAST child of the circular sibling
     list (see add_child_die).  */
  return die->die_child;
}
5244 | |
5245 | /* Return a reference to the sibling of a given DIE. */ |
5246 | |
dw_die_ref
dw_get_die_sib (dw_die_ref die)
{
  /* Plain accessor; siblings form a circular list.  */
  return die->die_sib;
}
5252 | |
5253 | /* Add an address constant attribute value to a DIE. When using |
5254 | dwarf_split_debug_info, address attributes in dies destined for the |
5255 | final executable should be direct references--setting the parameter |
5256 | force_direct ensures this behavior. */ |
5257 | |
static inline void
add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
	     bool force_direct)
{
  dw_attr_node attr;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_addr;
  attr.dw_attr_val.v.val_addr = addr;
  /* For split debug info, route the address through the .debug_addr
     table unless the caller demands a direct reference.  */
  if (dwarf_split_debug_info && !force_direct)
    attr.dw_attr_val.val_entry = add_addr_table_entry (addr, kind: ate_kind_rtx);
  else
    attr.dw_attr_val.val_entry = NULL;
  add_dwarf_attr (die, attr: &attr);
}
5273 | |
/* Get the RTX from an address DIE attribute.  */
5275 | |
static inline rtx
AT_addr (dw_attr_node *a)
{
  /* Only valid for dw_val_class_addr attributes.  */
  gcc_assert (a && AT_class (a) == dw_val_class_addr);
  return a->dw_attr_val.v.val_addr;
}
5282 | |
5283 | /* Add a file attribute value to a DIE. */ |
5284 | |
static inline void
add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
	     struct dwarf_file_data *fd)
{
  dw_attr_node attr;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_file;
  attr.dw_attr_val.val_entry = NULL;
  /* FD is stored by reference; ownership stays with the caller's
     file table.  */
  attr.dw_attr_val.v.val_file = fd;
  add_dwarf_attr (die, attr: &attr);
}
5297 | |
5298 | /* Get the dwarf_file_data from a file DIE attribute. */ |
5299 | |
static inline struct dwarf_file_data *
AT_file (dw_attr_node *a)
{
  /* Both explicit and implicit file attributes share the same union
     member.  */
  gcc_assert (a && (AT_class (a) == dw_val_class_file
		    || AT_class (a) == dw_val_class_file_implicit));
  return a->dw_attr_val.v.val_file;
}
5307 | |
5308 | #if VMS_DEBUGGING_INFO |
5309 | /* Add a vms delta attribute value to a DIE. */ |
5310 | |
static inline void
add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
		  const char *lbl1, const char *lbl2)
{
  dw_attr_node attr;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_vms_delta;
  attr.dw_attr_val.val_entry = NULL;
  /* xstrdup so the attribute owns stable copies of both labels.  */
  attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
  attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
  add_dwarf_attr (die, &attr);
}
5324 | #endif |
5325 | |
5326 | /* Add a symbolic view identifier attribute value to a DIE. */ |
5327 | |
static inline void
add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
		const char *view_label)
{
  dw_attr_node attr;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_symview;
  attr.dw_attr_val.val_entry = NULL;
  /* xstrdup so the attribute owns a stable copy of the label.  */
  attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
  add_dwarf_attr (die, attr: &attr);
}
5340 | |
5341 | /* Add a label identifier attribute value to a DIE. */ |
5342 | |
static inline void
add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
	       const char *lbl_id)
{
  dw_attr_node attr;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_lbl_id;
  attr.dw_attr_val.val_entry = NULL;
  attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
  /* With split debug info, labels are additionally referenced
     indirectly through the .debug_addr table.  */
  if (dwarf_split_debug_info)
    attr.dw_attr_val.val_entry
      = add_addr_table_entry (addr: attr.dw_attr_val.v.val_lbl_id,
			      kind: ate_kind_label);
  add_dwarf_attr (die, attr: &attr);
}
5359 | |
5360 | /* Add a section offset attribute value to a DIE, an offset into the |
5361 | debug_line section. */ |
5362 | |
static inline void
add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
		const char *label)
{
  dw_attr_node attr;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_lineptr;
  attr.dw_attr_val.val_entry = NULL;
  /* The label names a spot in .debug_line; keep an owned copy.  */
  attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
  add_dwarf_attr (die, attr: &attr);
}
5375 | |
5376 | /* Add a section offset attribute value to a DIE, an offset into the |
5377 | debug_macinfo section. */ |
5378 | |
static inline void
add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
	       const char *label)
{
  dw_attr_node attr;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_macptr;
  attr.dw_attr_val.val_entry = NULL;
  /* The label names a spot in .debug_macinfo; keep an owned copy.  */
  attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
  add_dwarf_attr (die, attr: &attr);
}
5391 | |
5392 | /* Add a range_list attribute value to a DIE. When using |
5393 | dwarf_split_debug_info, address attributes in dies destined for the |
5394 | final executable should be direct references--setting the parameter |
5395 | force_direct ensures this behavior. */ |
5396 | |
/* Sentinel values stored in val_entry to distinguish split-debug-info
   (unrelocated) range offsets from normal (relocated) ones.  */
#define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
#define RELOCATED_OFFSET (NULL)

static void
add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
                   long unsigned int offset, bool force_direct)
{
  dw_attr_node attr;

  attr.dw_attr = attr_kind;
  attr.dw_attr_val.val_class = dw_val_class_range_list;
  /* For the range_list attribute, use val_entry to store whether the
     offset should follow split-debug-info or normal semantics.  This
     value is read in output_range_list_offset.  */
  if (dwarf_split_debug_info && !force_direct)
    attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
  else
    attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
  attr.dw_attr_val.v.val_offset = offset;
  add_dwarf_attr (die, attr: &attr);
}
5418 | |
5419 | /* Return the start label of a delta attribute. */ |
5420 | |
static inline const char *
AT_vms_delta1 (dw_attr_node *a)
{
  /* Only valid for dw_val_class_vms_delta attributes.  */
  gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
  return a->dw_attr_val.v.val_vms_delta.lbl1;
}
5427 | |
5428 | /* Return the end label of a delta attribute. */ |
5429 | |
static inline const char *
AT_vms_delta2 (dw_attr_node *a)
{
  /* Only valid for dw_val_class_vms_delta attributes.  */
  gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
  return a->dw_attr_val.v.val_vms_delta.lbl2;
}
5436 | |
/* Return the label string of an attribute whose value class carries a
   label: lbl_id, lineptr, macptr, loclistsptr or high_pc.  */

static inline const char *
AT_lbl (dw_attr_node *a)
{
  gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
		    || AT_class (a) == dw_val_class_lineptr
		    || AT_class (a) == dw_val_class_macptr
		    || AT_class (a) == dw_val_class_loclistsptr
		    || AT_class (a) == dw_val_class_high_pc));
  return a->dw_attr_val.v.val_lbl_id;
}
5447 | |
5448 | /* Get the attribute of type attr_kind. */ |
5449 | |
dw_attr_node *
get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
{
  dw_attr_node *a;
  unsigned ix;
  dw_die_ref spec = NULL;

  if (! die)
    return NULL;

  /* While scanning for ATTR_KIND, remember any specification or
     abstract origin reference to search as a fallback.  */
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    if (a->dw_attr == attr_kind)
      return a;
    else if (a->dw_attr == DW_AT_specification
	     || a->dw_attr == DW_AT_abstract_origin)
      spec = AT_ref (a);

  /* Not found on DIE itself: recurse into the referenced DIE.  */
  if (spec)
    return get_AT (die: spec, attr_kind);

  return NULL;
}
5472 | |
5473 | /* Returns the parent of the declaration of DIE. */ |
5474 | |
5475 | static dw_die_ref |
5476 | get_die_parent (dw_die_ref die) |
5477 | { |
5478 | dw_die_ref t; |
5479 | |
5480 | if (!die) |
5481 | return NULL; |
5482 | |
5483 | if ((t = get_AT_ref (die, DW_AT_abstract_origin)) |
5484 | || (t = get_AT_ref (die, DW_AT_specification))) |
5485 | die = t; |
5486 | |
5487 | return die->die_parent; |
5488 | } |
5489 | |
5490 | /* Return the "low pc" attribute value, typically associated with a subprogram |
5491 | DIE. Return null if the "low pc" attribute is either not present, or if it |
5492 | cannot be represented as an assembler label identifier. */ |
5493 | |
static inline const char *
get_AT_low_pc (dw_die_ref die)
{
  dw_attr_node *a = get_AT (die, attr_kind: DW_AT_low_pc);

  /* AT_lbl asserts the value class is one that carries a label.  */
  return a ? AT_lbl (a) : NULL;
}
5501 | |
5502 | /* Return the value of the string attribute designated by ATTR_KIND, or |
5503 | NULL if it is not present. */ |
5504 | |
5505 | const char * |
5506 | get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind) |
5507 | { |
5508 | dw_attr_node *a = get_AT (die, attr_kind); |
5509 | |
5510 | return a ? AT_string (a) : NULL; |
5511 | } |
5512 | |
/* Return the value of the flag attribute designated by ATTR_KIND, or 0
   if it is not present.  */
5515 | |
int
get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
{
  dw_attr_node *a = get_AT (die, attr_kind);

  /* A missing attribute reads as flag value 0.  */
  return a ? AT_flag (a) : 0;
}
5523 | |
5524 | /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0 |
5525 | if it is not present. */ |
5526 | |
unsigned
get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
{
  dw_attr_node *a = get_AT (die, attr_kind);

  /* A missing attribute reads as 0.  */
  return a ? AT_unsigned (a) : 0;
}
5534 | |
/* Return the DIE referenced by the attribute designated by ATTR_KIND,
   or NULL if it is not present.  */

dw_die_ref
get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
{
  dw_attr_node *a = get_AT (die, attr_kind);

  return a ? AT_ref (a) : NULL;
}
5542 | |
/* Return the file data of the file attribute designated by ATTR_KIND,
   or NULL if it is not present.  */

struct dwarf_file_data *
get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
{
  dw_attr_node *a = get_AT (die, attr_kind);

  return a ? AT_file (a) : NULL;
}
5550 | |
5551 | /* Return TRUE if the language is C. */ |
5552 | |
5553 | static inline bool |
5554 | is_c (void) |
5555 | { |
5556 | unsigned int lang = get_AT_unsigned (die: comp_unit_die (), attr_kind: DW_AT_language); |
5557 | |
5558 | return (lang == DW_LANG_C || lang == DW_LANG_C89 || lang == DW_LANG_C99 |
5559 | || lang == DW_LANG_C11 || lang == DW_LANG_ObjC); |
5560 | |
5561 | |
5562 | } |
5563 | |
5564 | /* Return TRUE if the language is C++. */ |
5565 | |
static inline bool
is_cxx (void)
{
  /* The compilation unit's DW_AT_language identifies the source
     language; ObjC++ counts as C++ here.  */
  unsigned int lang = get_AT_unsigned (die: comp_unit_die (), attr_kind: DW_AT_language);

  return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
	  || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
}
5574 | |
5575 | /* Return TRUE if DECL was created by the C++ frontend. */ |
5576 | |
static bool
is_cxx (const_tree decl)
{
  if (in_lto_p)
    {
      /* In LTO, determine the language from DECL's own translation
	 unit when one is recorded, rather than from the CU DIE.  */
      const_tree context = get_ultimate_context (decl);
      if (context && TRANSLATION_UNIT_LANGUAGE (context))
	return startswith (TRANSLATION_UNIT_LANGUAGE (context), prefix: "GNU C++" );
    }
  /* Otherwise fall back to the compilation unit's language.  */
  return is_cxx ();
}
5588 | |
5589 | /* Return TRUE if the language is Fortran. */ |
5590 | |
5591 | static inline bool |
5592 | is_fortran (void) |
5593 | { |
5594 | unsigned int lang = get_AT_unsigned (die: comp_unit_die (), attr_kind: DW_AT_language); |
5595 | |
5596 | return (lang == DW_LANG_Fortran77 |
5597 | || lang == DW_LANG_Fortran90 |
5598 | || lang == DW_LANG_Fortran95 |
5599 | || lang == DW_LANG_Fortran03 |
5600 | || lang == DW_LANG_Fortran08); |
5601 | } |
5602 | |
static inline bool
is_fortran (const_tree decl)
{
  if (in_lto_p)
    {
      /* In LTO, determine the language from DECL's own translation
	 unit when one is recorded, rather than from the CU DIE.  */
      const_tree context = get_ultimate_context (decl);
      if (context && TRANSLATION_UNIT_LANGUAGE (context))
	return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
			 s2: "GNU Fortran" , n: 11) == 0
		|| strcmp (TRANSLATION_UNIT_LANGUAGE (context),
			   s2: "GNU F77" ) == 0);
    }
  /* Otherwise fall back to the compilation unit's language.  */
  return is_fortran ();
}
5617 | |
5618 | /* Return TRUE if the language is Rust. |
5619 | Note, returns FALSE for dwarf_version < 5 && dwarf_strict. */ |
5620 | |
static inline bool
is_rust (void)
{
  unsigned int lang = get_AT_unsigned (die: comp_unit_die (), attr_kind: DW_AT_language);

  /* DW_LANG_Rust is a DWARF 5 code; per the comment above, strict
     pre-5 DWARF never records it, so this reads false there.  */
  return lang == DW_LANG_Rust;
}
5628 | |
5629 | /* Return TRUE if the language is Ada. */ |
5630 | |
static inline bool
is_ada (void)
{
  unsigned int lang = get_AT_unsigned (die: comp_unit_die (), attr_kind: DW_AT_language);

  return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
}
5638 | |
5639 | /* Return TRUE if the language is D. */ |
5640 | |
static inline bool
is_dlang (void)
{
  unsigned int lang = get_AT_unsigned (die: comp_unit_die (), attr_kind: DW_AT_language);

  return lang == DW_LANG_D;
}
5648 | |
5649 | /* Remove the specified attribute if present. Return TRUE if removal |
5650 | was successful. */ |
5651 | |
static bool
remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
{
  dw_attr_node *a;
  unsigned ix;

  if (! die)
    return false;

  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    if (a->dw_attr == attr_kind)
      {
	/* Release the string table reference held by a string-valued
	   attribute before removing it.  */
	if (AT_class (a) == dw_val_class_str)
	  if (a->dw_attr_val.v.val_str->refcount)
	    a->dw_attr_val.v.val_str->refcount--;

	/* vec::ordered_remove should help reduce the number of abbrevs
	   that are needed.  */
	die->die_attr->ordered_remove (ix);
	return true;
      }
  return false;
}
5675 | |
5676 | /* Remove CHILD from its parent. PREV must have the property that |
5677 | PREV->DIE_SIB == CHILD. Does not alter CHILD. */ |
5678 | |
static void
remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
{
  gcc_assert (child->die_parent == prev->die_parent);
  gcc_assert (prev->die_sib == child);
  /* Siblings form a circular list whose head (die_parent->die_child)
     is the last child added (see add_child_die); PREV == CHILD means
     CHILD is the only child.  */
  if (prev == child)
    {
      gcc_assert (child->die_parent->die_child == child);
      prev = NULL;
    }
  else
    prev->die_sib = child->die_sib;
  /* If CHILD was the list head, PREV (possibly NULL) becomes it.  */
  if (child->die_parent->die_child == child)
    child->die_parent->die_child = prev;
  child->die_sib = NULL;
}
5695 | |
5696 | /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that |
5697 | PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */ |
5698 | |
static void
replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
{
  dw_die_ref parent = old_child->die_parent;

  gcc_assert (parent == prev->die_parent);
  gcc_assert (prev->die_sib == old_child);

  new_child->die_parent = parent;
  /* PREV == OLD_CHILD means OLD_CHILD was the only child: the new
     child's sibling link must point at itself.  */
  if (prev == old_child)
    {
      gcc_assert (parent->die_child == old_child);
      new_child->die_sib = new_child;
    }
  else
    {
      prev->die_sib = new_child;
      new_child->die_sib = old_child->die_sib;
    }
  /* Keep the list head (last child) up to date.  */
  if (old_child->die_parent->die_child == old_child)
    old_child->die_parent->die_child = new_child;
  old_child->die_sib = NULL;
}
5722 | |
5723 | /* Move all children from OLD_PARENT to NEW_PARENT. */ |
5724 | |
static void
move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
{
  dw_die_ref c;
  /* Transfer the whole circular child list in one pointer move, then
     fix each child's parent back-pointer.  */
  new_parent->die_child = old_parent->die_child;
  old_parent->die_child = NULL;
  FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
}
5733 | |
5734 | /* Remove child DIE whose die_tag is TAG. Do nothing if no child |
5735 | matches TAG. */ |
5736 | |
static void
remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
{
  dw_die_ref c;

  /* Walk the circular sibling list starting after the head (the last
     child); PREV stays valid across removals, so after each removal
     resume from PREV->die_sib.  */
  c = die->die_child;
  if (c) do {
    dw_die_ref prev = c;
    c = c->die_sib;
    while (c->die_tag == tag)
      {
	remove_child_with_prev (child: c, prev);
	c->die_parent = NULL;
	/* Might have removed every child.  */
	if (die->die_child == NULL)
	  return;
	c = prev->die_sib;
      }
  } while (c != die->die_child);
}
5757 | |
5758 | /* Add a CHILD_DIE as the last child of DIE. */ |
5759 | |
static void
add_child_die (dw_die_ref die, dw_die_ref child_die)
{
  /* FIXME this should probably be an assert.  */
  if (! die || ! child_die)
    return;
  gcc_assert (die != child_die);

  child_die->die_parent = die;
  /* Children form a circular list; die_child points at the LAST
     child, whose die_sib wraps around to the first.  */
  if (die->die_child)
    {
      child_die->die_sib = die->die_child->die_sib;
      die->die_child->die_sib = child_die;
    }
  else
    /* First child: it is its own sibling.  */
    child_die->die_sib = child_die;
  die->die_child = child_die;
}
5778 | |
5779 | /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */ |
5780 | |
static void
add_child_die_after (dw_die_ref die, dw_die_ref child_die,
		     dw_die_ref after_die)
{
  gcc_assert (die
	      && child_die
	      && after_die
	      && die->die_child
	      && die != child_die);

  child_die->die_parent = die;
  /* Splice CHILD_DIE into the circular list right after AFTER_DIE.  */
  child_die->die_sib = after_die->die_sib;
  after_die->die_sib = child_die;
  /* If AFTER_DIE was the last child, CHILD_DIE now is.  */
  if (die->die_child == after_die)
    die->die_child = child_die;
}
5797 | |
5798 | /* Unassociate CHILD from its parent, and make its parent be |
5799 | NEW_PARENT. */ |
5800 | |
static void
reparent_child (dw_die_ref child, dw_die_ref new_parent)
{
  /* Find CHILD's predecessor in the circular sibling list so it can
     be unlinked, then append it to NEW_PARENT.  */
  for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
    if (p->die_sib == child)
      {
	remove_child_with_prev (child, prev: p);
	break;
      }
  add_child_die (die: new_parent, child_die: child);
}
5812 | |
5813 | /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT |
5814 | is the specification, to the end of PARENT's list of children. |
5815 | This is done by removing and re-adding it. */ |
5816 | |
static void
splice_child_die (dw_die_ref parent, dw_die_ref child)
{
  /* We want the declaration DIE from inside the class, not the
     specification DIE at toplevel.  */
  if (child->die_parent != parent)
    {
      dw_die_ref tmp = get_AT_ref (die: child, attr_kind: DW_AT_specification);

      if (tmp)
	child = tmp;
    }

  /* CHILD must belong either to PARENT directly or to the DIE PARENT
     is the specification of.  */
  gcc_assert (child->die_parent == parent
	      || (child->die_parent
		  == get_AT_ref (parent, DW_AT_specification)));

  reparent_child (child, new_parent: parent);
}
5836 | |
5837 | /* Create and return a new die with TAG_VALUE as tag. */ |
5838 | |
5839 | dw_die_ref |
5840 | new_die_raw (enum dwarf_tag tag_value) |
5841 | { |
5842 | dw_die_ref die = ggc_cleared_alloc<die_node> (); |
5843 | die->die_tag = tag_value; |
5844 | return die; |
5845 | } |
5846 | |
5847 | /* Create and return a new die with a parent of PARENT_DIE. If |
5848 | PARENT_DIE is NULL, the new DIE is placed in limbo and an |
5849 | associated tree T must be supplied to determine parenthood |
5850 | later. */ |
5851 | |
static inline dw_die_ref
new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
{
  dw_die_ref die = new_die_raw (tag_value);

  if (parent_die != NULL)
    add_child_die (die: parent_die, child_die: die);
  else
    {
      limbo_die_node *limbo_node;

      /* No DIEs created after early dwarf should end up in limbo,
	 because the limbo list should not persist past LTO
	 streaming.  */
      if (tag_value != DW_TAG_compile_unit
	  /* These are allowed because they're generated while
	     breaking out COMDAT units late.  */
	  && tag_value != DW_TAG_type_unit
	  && tag_value != DW_TAG_skeleton_unit
	  && !early_dwarf
	  /* Allow nested functions to live in limbo because they will
	     only temporarily live there, as decls_for_scope will fix
	     them up.  */
	  && (TREE_CODE (t) != FUNCTION_DECL
	      || !decl_function_context (t))
	  /* Same as nested functions above but for types.  Types that
	     are local to a function will be fixed in
	     decls_for_scope.  */
	  && (!RECORD_OR_UNION_TYPE_P (t)
	      || !TYPE_CONTEXT (t)
	      || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
	  /* FIXME debug-early: Allow late limbo DIE creation for LTO,
	     especially in the ltrans stage, but once we implement LTO
	     dwarf streaming, we should remove this exception.  */
	  && !in_lto_p)
	{
	  /* Diagnose and abort: late limbo creation is a bug.  */
	  fprintf (stderr, format: "symbol ended up in limbo too late:" );
	  debug_generic_stmt (t);
	  gcc_unreachable ();
	}

      /* Park the parentless DIE on the limbo list; T lets the
	 parenthood be determined later.  */
      limbo_node = ggc_cleared_alloc<limbo_die_node> ();
      limbo_node->die = die;
      limbo_node->created_for = t;
      limbo_node->next = limbo_die_list;
      limbo_die_list = limbo_node;
    }

  return die;
}
5902 | |
5903 | /* Return the DIE associated with the given type specifier. */ |
5904 | |
dw_die_ref
lookup_type_die (tree type)
{
  dw_die_ref die = TYPE_SYMTAB_DIE (type);
  /* Lazily drop references to DIEs that were removed after being
     associated with TYPE, and clear the written flag so the type can
     be regenerated.  */
  if (die && die->removed)
    {
      TYPE_SYMTAB_DIE (type) = NULL;
      TREE_ASM_WRITTEN (type) = 0;
      return NULL;
    }
  return die;
}
5917 | |
/* Given a TYPE_DIE representing the type TYPE, if TYPE is an
   anonymous type named by the typedef TYPE_DIE, return the DIE of the
   anonymous type instead of the one of the naming typedef.  */
5921 | |
static inline dw_die_ref
strip_naming_typedef (tree type, dw_die_ref type_die)
{
  /* Only RECORD_TYPEs equated to a naming typedef's DW_TAG_typedef
     DIE need stripping; the real type DIE is the typedef's
     DW_AT_type.  */
  if (type
      && TREE_CODE (type) == RECORD_TYPE
      && type_die
      && type_die->die_tag == DW_TAG_typedef
      && is_naming_typedef_decl (TYPE_NAME (type)))
    type_die = get_AT_ref (die: type_die, attr_kind: DW_AT_type);
  return type_die;
}
5933 | |
5934 | /* Like lookup_type_die, but if type is an anonymous type named by a |
   typedef[1], return the DIE of the anonymous type instead of the one
   of the naming typedef.  This is because in gen_typedef_die, we did
5937 | equate the anonymous struct named by the typedef with the DIE of |
5938 | the naming typedef. So by default, lookup_type_die on an anonymous |
5939 | struct yields the DIE of the naming typedef. |
5940 | |
5941 | [1]: Read the comment of is_naming_typedef_decl to learn about what |
5942 | a naming typedef is. */ |
5943 | |
static inline dw_die_ref
lookup_type_die_strip_naming_typedef (tree type)
{
  /* Compose the plain lookup with the naming-typedef unwrapping.  */
  dw_die_ref die = lookup_type_die (type);
  return strip_naming_typedef (type, type_die: die);
}
5950 | |
5951 | /* Equate a DIE to a given type specifier. */ |
5952 | |
static inline void
equate_type_number_to_die (tree type, dw_die_ref type_die)
{
  /* Inverse of lookup_type_die: record the association on TYPE.  */
  TYPE_SYMTAB_DIE (type) = type_die;
}
5958 | |
static dw_die_ref maybe_create_die_with_external_ref (tree);

/* A symbol + offset pair naming a DIE in another unit's debug info;
   used during LTO to refer to early-emitted DIEs.  */
struct GTY(()) sym_off_pair
{
  /* Not GC-scanned: the label string is not GC-allocated.  */
  const char * GTY((skip)) sym;
  unsigned HOST_WIDE_INT off;
};
/* Map from decl/block trees to their external symbol+offset DIE
   references (LTO/WPA only; see dwarf2out_die_ref_for_decl).  */
static GTY(()) hash_map<tree, sym_off_pair> *external_die_map;
5966 | |
5967 | /* Returns a hash value for X (which really is a die_struct). */ |
5968 | |
inline hashval_t
decl_die_hasher::hash (die_node *x)
{
  /* DIEs are keyed by the DECL_UID they were created for.  */
  return (hashval_t) x->decl_id;
}
5974 | |
5975 | /* Return true if decl_id of die_struct X is the same as UID of decl *Y. */ |
5976 | |
inline bool
decl_die_hasher::equal (die_node *x, tree y)
{
  /* Compare the stored DECL_UID against the lookup key's.  */
  return (x->decl_id == DECL_UID (y));
}
5982 | |
5983 | /* Return the DIE associated with a given declaration. */ |
5984 | |
dw_die_ref
lookup_decl_die (tree decl)
{
  dw_die_ref *die = decl_die_table->find_slot_with_hash (comparable: decl, DECL_UID (decl),
							 insert: NO_INSERT);
  if (!die)
    {
      /* In LTO the DIE may only exist as an external symbol + offset
	 reference; try to materialize one from that.  */
      if (in_lto_p)
	return maybe_create_die_with_external_ref (decl);
      return NULL;
    }
  /* Lazily purge table slots whose DIE has been removed.  */
  if ((*die)->removed)
    {
      decl_die_table->clear_slot (slot: die);
      return NULL;
    }
  return *die;
}
6003 | |
6004 | |
6005 | /* Return the DIE associated with BLOCK. */ |
6006 | |
6007 | static inline dw_die_ref |
6008 | lookup_block_die (tree block) |
6009 | { |
6010 | dw_die_ref die = BLOCK_DIE (block); |
6011 | if (!die && in_lto_p) |
6012 | return maybe_create_die_with_external_ref (block); |
6013 | return die; |
6014 | } |
6015 | |
6016 | /* Associate DIE with BLOCK. */ |
6017 | |
static inline void
equate_block_to_die (tree block, dw_die_ref die)
{
  /* Blocks store their DIE directly in the tree node.  */
  BLOCK_DIE (block) = die;
}
6023 | #undef BLOCK_DIE |
6024 | |
6025 | |
6026 | /* For DECL which might have early dwarf output query a SYMBOL + OFFSET |
   style reference.  Return true if we found one referring to a DIE for
6028 | DECL, otherwise return false. */ |
6029 | |
static bool
dwarf2out_die_ref_for_decl (tree decl, const char **sym,
			    unsigned HOST_WIDE_INT *off)
{
  dw_die_ref die;

  if (in_lto_p)
    {
      /* During WPA stage and incremental linking we use a hash-map
	 to store the decl <-> label + offset map.  */
      if (!external_die_map)
	return false;
      sym_off_pair *desc = external_die_map->get (k: decl);
      if (!desc)
	return false;
      *sym = desc->sym;
      *off = desc->off;
      return true;
    }

  /* Outside of LTO look up the DIE that was created for DECL.  */
  if (TREE_CODE (decl) == BLOCK)
    die = lookup_block_die (block: decl);
  else
    die = lookup_decl_die (decl);
  if (!die)
    return false;

  /* Similar to get_ref_die_offset_label, but using the "correct"
     label.  */
  *off = die->die_offset;
  /* Walk up to the root DIE to identify the containing unit.  */
  while (die->die_parent)
    die = die->die_parent;
  /* For the containing CU DIE we compute a die_symbol in
     compute_comp_unit_symbol.  */
  if (die->die_tag == DW_TAG_compile_unit)
    {
      gcc_assert (die->die_id.die_symbol != NULL);
      *sym = die->die_id.die_symbol;
      return true;
    }
  /* While we can gracefully handle running into say a type unit
     we don't really want and consider this a bug.  */
  if (flag_checking)
    gcc_unreachable ();
  return false;
}
6076 | |
6077 | /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */ |
6078 | |
6079 | static void |
6080 | add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, |
6081 | const char *symbol, HOST_WIDE_INT offset) |
6082 | { |
6083 | /* Create a fake DIE that contains the reference. Don't use |
6084 | new_die because we don't want to end up in the limbo list. */ |
6085 | /* ??? We probably want to share these, thus put a ref to the DIE |
6086 | we create here to the external_die_map entry. */ |
6087 | dw_die_ref ref = new_die_raw (tag_value: die->die_tag); |
6088 | ref->die_id.die_symbol = symbol; |
6089 | ref->die_offset = offset; |
6090 | ref->with_offset = 1; |
6091 | add_AT_die_ref (die, attr_kind, targ_die: ref); |
6092 | } |
6093 | |
6094 | /* Create a DIE for DECL if required and add a reference to a DIE |
6095 | at SYMBOL + OFFSET which contains attributes dumped early. */ |
6096 | |
6097 | static void |
6098 | dwarf2out_register_external_die (tree decl, const char *sym, |
6099 | unsigned HOST_WIDE_INT off) |
6100 | { |
6101 | if (debug_info_level == DINFO_LEVEL_NONE) |
6102 | return; |
6103 | |
6104 | if (!external_die_map) |
6105 | external_die_map = hash_map<tree, sym_off_pair>::create_ggc (size: 1000); |
6106 | gcc_checking_assert (!external_die_map->get (decl)); |
6107 | sym_off_pair p = { IDENTIFIER_POINTER (get_identifier (sym)), .off: off }; |
6108 | external_die_map->put (k: decl, v: p); |
6109 | } |
6110 | |
6111 | /* If we have a registered external DIE for DECL return a new DIE for |
6112 | the concrete instance with an appropriate abstract origin. */ |
6113 | |
static dw_die_ref
maybe_create_die_with_external_ref (tree decl)
{
  if (!external_die_map)
    return NULL;
  sym_off_pair *desc = external_die_map->get (k: decl);
  if (!desc)
    return NULL;

  const char *sym = desc->sym;
  unsigned HOST_WIDE_INT off = desc->off;
  /* Consume the entry so we only ever create one stub per decl.  */
  external_die_map->remove (k: decl);

  /* Temporarily clear in_lto_p: lookup_decl_die/lookup_block_die would
     otherwise recurse back into this function.  The lookup must come up
     empty, since no DIE has been created yet.  */
  in_lto_p = false;
  dw_die_ref die = (TREE_CODE (decl) == BLOCK
		    ? lookup_block_die (block: decl) : lookup_decl_die (decl));
  gcc_assert (!die);
  in_lto_p = true;

  tree ctx;
  dw_die_ref parent = NULL;
  /* Need to lookup a DIE for the decls context - the containing
     function or translation unit.  */
  if (TREE_CODE (decl) == BLOCK)
    {
      ctx = BLOCK_SUPERCONTEXT (decl);
      /* ??? We do not output DIEs for all scopes thus skip as
	 many DIEs as needed.  */
      while (TREE_CODE (ctx) == BLOCK
	     && !lookup_block_die (block: ctx))
	ctx = BLOCK_SUPERCONTEXT (ctx);
    }
  else
    ctx = DECL_CONTEXT (decl);
  /* Peel types in the context stack.  */
  while (ctx && TYPE_P (ctx))
    ctx = TYPE_CONTEXT (ctx);
  /* Likewise namespaces in case we do not want to emit DIEs for them.  */
  if (debug_info_level <= DINFO_LEVEL_TERSE)
    while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
      ctx = DECL_CONTEXT (ctx);
  if (ctx)
    {
      if (TREE_CODE (ctx) == BLOCK)
	parent = lookup_block_die (block: ctx);
      else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
	       /* Keep the 1:1 association during WPA.  */
	       && !flag_wpa
	       && flag_incremental_link != INCREMENTAL_LINK_LTO)
	/* Otherwise all late annotations go to the main CU which
	   imports the original CUs.  */
	parent = comp_unit_die ();
      else if (TREE_CODE (ctx) == FUNCTION_DECL
	       && TREE_CODE (decl) != FUNCTION_DECL
	       && TREE_CODE (decl) != PARM_DECL
	       && TREE_CODE (decl) != RESULT_DECL
	       && TREE_CODE (decl) != BLOCK)
	/* Leave function local entities parent determination to when
	   we process scope vars.  */
	;
      else
	parent = lookup_decl_die (decl: ctx);
    }
  else
    /* In some cases the FEs fail to set DECL_CONTEXT properly.
       Handle this case gracefully by globalizing stuff.  */
    parent = comp_unit_die ();
  /* Create a DIE "stub" with a tag matching the kind of DECL.  */
  switch (TREE_CODE (decl))
    {
    case TRANSLATION_UNIT_DECL:
      {
	die = comp_unit_die ();
	/* We re-target all CU decls to the LTRANS CU DIE, so no need
	   to create a DIE for the original CUs.  */
	return die;
      }
    case NAMESPACE_DECL:
      if (is_fortran (decl))
	die = new_die (tag_value: DW_TAG_module, parent_die: parent, t: decl);
      else
	die = new_die (tag_value: DW_TAG_namespace, parent_die: parent, t: decl);
      break;
    case FUNCTION_DECL:
      die = new_die (tag_value: DW_TAG_subprogram, parent_die: parent, t: decl);
      break;
    case VAR_DECL:
      die = new_die (tag_value: DW_TAG_variable, parent_die: parent, t: decl);
      break;
    case RESULT_DECL:
      die = new_die (tag_value: DW_TAG_variable, parent_die: parent, t: decl);
      break;
    case PARM_DECL:
      die = new_die (tag_value: DW_TAG_formal_parameter, parent_die: parent, t: decl);
      break;
    case CONST_DECL:
      die = new_die (tag_value: DW_TAG_constant, parent_die: parent, t: decl);
      break;
    case LABEL_DECL:
      die = new_die (tag_value: DW_TAG_label, parent_die: parent, t: decl);
      break;
    case BLOCK:
      die = new_die (tag_value: DW_TAG_lexical_block, parent_die: parent, t: decl);
      break;
    default:
      gcc_unreachable ();
    }
  /* Register the stub so subsequent lookups find it directly.  */
  if (TREE_CODE (decl) == BLOCK)
    equate_block_to_die (block: decl, die);
  else
    equate_decl_number_to_die (decl, die);

  add_desc_attribute (die, decl);

  /* Add a reference to the DIE providing early debug at $sym + off.  */
  add_AT_external_die_ref (die, attr_kind: DW_AT_abstract_origin, symbol: sym, offset: off);

  return die;
}
6233 | |
6234 | /* Returns a hash value for X (which really is a var_loc_list). */ |
6235 | |
6236 | inline hashval_t |
6237 | decl_loc_hasher::hash (var_loc_list *x) |
6238 | { |
6239 | return (hashval_t) x->decl_id; |
6240 | } |
6241 | |
6242 | /* Return true if decl_id of var_loc_list X is the same as |
6243 | UID of decl *Y. */ |
6244 | |
6245 | inline bool |
6246 | decl_loc_hasher::equal (var_loc_list *x, const_tree y) |
6247 | { |
6248 | return (x->decl_id == DECL_UID (y)); |
6249 | } |
6250 | |
6251 | /* Return the var_loc list associated with a given declaration. */ |
6252 | |
6253 | static inline var_loc_list * |
6254 | lookup_decl_loc (const_tree decl) |
6255 | { |
6256 | if (!decl_loc_table) |
6257 | return NULL; |
6258 | return decl_loc_table->find_with_hash (comparable: decl, DECL_UID (decl)); |
6259 | } |
6260 | |
/* Returns a hash value for X (which really is a cached_dw_loc_list).  */
6262 | |
6263 | inline hashval_t |
6264 | dw_loc_list_hasher::hash (cached_dw_loc_list *x) |
6265 | { |
6266 | return (hashval_t) x->decl_id; |
6267 | } |
6268 | |
6269 | /* Return true if decl_id of cached_dw_loc_list X is the same as |
6270 | UID of decl *Y. */ |
6271 | |
6272 | inline bool |
6273 | dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y) |
6274 | { |
6275 | return (x->decl_id == DECL_UID (y)); |
6276 | } |
6277 | |
6278 | /* Equate a DIE to a particular declaration. */ |
6279 | |
6280 | static void |
6281 | equate_decl_number_to_die (tree decl, dw_die_ref decl_die) |
6282 | { |
6283 | unsigned int decl_id = DECL_UID (decl); |
6284 | |
6285 | *decl_die_table->find_slot_with_hash (comparable: decl, hash: decl_id, insert: INSERT) = decl_die; |
6286 | decl_die->decl_id = decl_id; |
6287 | } |
6288 | |
/* Return how many bits the PIECE EXPR_LIST covers.  */
6290 | |
6291 | static HOST_WIDE_INT |
6292 | decl_piece_bitsize (rtx piece) |
6293 | { |
6294 | int ret = (int) GET_MODE (piece); |
6295 | if (ret) |
6296 | return ret; |
6297 | gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT |
6298 | && CONST_INT_P (XEXP (XEXP (piece, 0), 0))); |
6299 | return INTVAL (XEXP (XEXP (piece, 0), 0)); |
6300 | } |
6301 | |
6302 | /* Return pointer to the location of location note in PIECE EXPR_LIST. */ |
6303 | |
6304 | static rtx * |
6305 | decl_piece_varloc_ptr (rtx piece) |
6306 | { |
6307 | if ((int) GET_MODE (piece)) |
6308 | return &XEXP (piece, 0); |
6309 | else |
6310 | return &XEXP (XEXP (piece, 0), 1); |
6311 | } |
6312 | |
6313 | /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits. |
6314 | Next is the chain of following piece nodes. */ |
6315 | |
6316 | static rtx_expr_list * |
6317 | decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next) |
6318 | { |
6319 | if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE) |
6320 | return alloc_EXPR_LIST (bitsize, loc_note, next); |
6321 | else |
6322 | return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode, |
6323 | GEN_INT (bitsize), |
6324 | loc_note), next); |
6325 | } |
6326 | |
6327 | /* Return rtx that should be stored into loc field for |
6328 | LOC_NOTE and BITPOS/BITSIZE. */ |
6329 | |
6330 | static rtx |
6331 | construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos, |
6332 | HOST_WIDE_INT bitsize) |
6333 | { |
6334 | if (bitsize != -1) |
6335 | { |
6336 | loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX); |
6337 | if (bitpos != 0) |
6338 | loc_note = decl_piece_node (NULL_RTX, bitsize: bitpos, next: loc_note); |
6339 | } |
6340 | return loc_note; |
6341 | } |
6342 | |
6343 | /* This function either modifies location piece list *DEST in |
6344 | place (if SRC and INNER is NULL), or copies location piece list |
6345 | *SRC to *DEST while modifying it. Location BITPOS is modified |
6346 | to contain LOC_NOTE, any pieces overlapping it are removed resp. |
6347 | not copied and if needed some padding around it is added. |
6348 | When modifying in place, DEST should point to EXPR_LIST where |
6349 | earlier pieces cover PIECE_BITPOS bits, when copying SRC points |
6350 | to the start of the whole list and INNER points to the EXPR_LIST |
6351 | where earlier pieces cover PIECE_BITPOS bits. */ |
6352 | |
static void
adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
		   HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
		   HOST_WIDE_INT bitsize, rtx loc_note)
{
  HOST_WIDE_INT diff;
  /* INNER != NULL selects copy mode (SRC -> DEST); otherwise DEST is
     modified in place.  */
  bool copy = inner != NULL;

  if (copy)
    {
      /* First copy all nodes preceding the current bitpos.  */
      while (src != inner)
	{
	  *dest = decl_piece_node (loc_note: *decl_piece_varloc_ptr (piece: *src),
				   bitsize: decl_piece_bitsize (piece: *src), NULL_RTX);
	  dest = &XEXP (*dest, 1);
	  src = &XEXP (*src, 1);
	}
    }
  /* Add padding if needed.  */
  if (bitpos != piece_bitpos)
    {
      *dest = decl_piece_node (NULL_RTX, bitsize: bitpos - piece_bitpos,
			       next: copy ? NULL_RTX : *dest);
      dest = &XEXP (*dest, 1);
    }
  else if (*dest && decl_piece_bitsize (piece: *dest) == bitsize)
    {
      gcc_assert (!copy);
      /* A piece with correct bitpos and bitsize already exists,
	 just update the location for it and return.  */
      *decl_piece_varloc_ptr (piece: *dest) = loc_note;
      return;
    }
  /* Add the piece that changed.  */
  *dest = decl_piece_node (loc_note, bitsize, next: copy ? NULL_RTX : *dest);
  dest = &XEXP (*dest, 1);
  /* Skip over pieces that overlap it.  DIFF tracks how many bits of
     overlap remain to be consumed.  */
  diff = bitpos - piece_bitpos + bitsize;
  if (!copy)
    src = dest;
  while (diff > 0 && *src)
    {
      rtx piece = *src;
      diff -= decl_piece_bitsize (piece);
      if (copy)
	src = &XEXP (piece, 1);
      else
	{
	  /* In-place mode: unlink and release the overlapped piece.  */
	  *src = XEXP (piece, 1);
	  free_EXPR_LIST_node (piece);
	}
    }
  /* Add padding if needed (the last skipped piece extended past the
     new piece's end by -DIFF bits).  */
  if (diff < 0 && *src)
    {
      if (!copy)
	dest = src;
      *dest = decl_piece_node (NULL_RTX, bitsize: -diff, next: copy ? NULL_RTX : *dest);
      dest = &XEXP (*dest, 1);
    }
  if (!copy)
    return;
  /* Finally copy all nodes following it.  */
  while (*src)
    {
      *dest = decl_piece_node (loc_note: *decl_piece_varloc_ptr (piece: *src),
			       bitsize: decl_piece_bitsize (piece: *src), NULL_RTX);
      dest = &XEXP (*dest, 1);
      src = &XEXP (*src, 1);
    }
}
6425 | |
6426 | /* Add a variable location node to the linked list for DECL. */ |
6427 | |
/* Add a variable-location note LOC_NOTE for DECL at LABEL/VIEW to
   DECL's var_loc list, creating the list on first use.  Returns the
   newly added node, or NULL when the note was folded into an existing
   node (or dropped).  */

static struct var_loc_node *
add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
{
  unsigned int decl_id;
  var_loc_list *temp;
  struct var_loc_node *loc = NULL;
  /* BITSIZE of -1 means the note covers the whole of DECL.  */
  HOST_WIDE_INT bitsize = -1, bitpos = -1;

  /* For SRA-created debug exprs, redirect the note to the underlying
     decl and record which bit range of it is being described.  */
  if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
    {
      tree realdecl = DECL_DEBUG_EXPR (decl);
      if (handled_component_p (t: realdecl)
	  || (TREE_CODE (realdecl) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
	{
	  bool reverse;
	  tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
							&bitsize, &reverse);
	  /* Give up on ignored/static bases or ranges past 256 bits.  */
	  if (!innerdecl
	      || !DECL_P (innerdecl)
	      || DECL_IGNORED_P (innerdecl)
	      || TREE_STATIC (innerdecl)
	      || bitsize == 0
	      || bitpos + bitsize > 256)
	    return NULL;
	  decl = innerdecl;
	}
    }

  decl_id = DECL_UID (decl);
  var_loc_list **slot
    = decl_loc_table->find_slot_with_hash (comparable: decl, hash: decl_id, insert: INSERT);
  if (*slot == NULL)
    {
      temp = ggc_cleared_alloc<var_loc_list> ();
      temp->decl_id = decl_id;
      *slot = temp;
    }
  else
    temp = *slot;

  /* For PARM_DECLs try to keep around the original incoming value,
     even if that means we'll emit a zero-range .debug_loc entry.  */
  if (temp->last
      && temp->first == temp->last
      && TREE_CODE (decl) == PARM_DECL
      && NOTE_P (temp->first->loc)
      && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
      && DECL_INCOMING_RTL (decl)
      && NOTE_VAR_LOCATION_LOC (temp->first->loc)
      && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
	 == GET_CODE (DECL_INCOMING_RTL (decl))
      && prev_real_insn (as_a<rtx_insn *> (p: temp->first->loc)) == NULL_RTX
      && (bitsize != -1
	  || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
			   NOTE_VAR_LOCATION_LOC (loc_note))
	  || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
	      != NOTE_VAR_LOCATION_STATUS (loc_note))))
    {
      loc = ggc_cleared_alloc<var_loc_node> ();
      temp->first->next = loc;
      temp->last = loc;
      loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
    }
  else if (temp->last)
    {
      struct var_loc_node *last = temp->last, *unused = NULL;
      rtx *piece_loc = NULL, last_loc_note;
      HOST_WIDE_INT piece_bitpos = 0;
      if (last->next)
	{
	  last = last->next;
	  gcc_assert (last->next == NULL);
	}
      /* For a partial (piece) note, locate the piece covering BITPOS
	 within the last node's piece list.  */
      if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
	{
	  piece_loc = &last->loc;
	  do
	    {
	      HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (piece: *piece_loc);
	      if (piece_bitpos + cur_bitsize > bitpos)
		break;
	      piece_bitpos += cur_bitsize;
	      piece_loc = &XEXP (*piece_loc, 1);
	    }
	  while (*piece_loc);
	}
      /* TEMP->LAST here is either pointer to the last but one or
	 last element in the chained list, LAST is pointer to the
	 last element.  */
      if (label && strcmp (s1: last->label, s2: label) == 0 && last->view == view)
	{
	  /* For SRA optimized variables if there weren't any real
	     insns since last note, just modify the last node.  */
	  if (piece_loc != NULL)
	    {
	      adjust_piece_list (dest: piece_loc, NULL, NULL,
				 bitpos, piece_bitpos, bitsize, loc_note);
	      return NULL;
	    }
	  /* If the last note doesn't cover any instructions, remove it.  */
	  if (temp->last != last)
	    {
	      temp->last->next = NULL;
	      unused = last;
	      last = temp->last;
	      gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
	    }
	  else
	    {
	      gcc_assert (temp->first == temp->last
			  || (temp->first->next == temp->last
			      && TREE_CODE (decl) == PARM_DECL));
	      memset (s: temp->last, c: '\0', n: sizeof (*temp->last));
	      temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
	      return temp->last;
	    }
	}
      /* Determine the note currently in effect at BITPOS, if any, so
	 a redundant identical note can be dropped below.  */
      if (bitsize == -1 && NOTE_P (last->loc))
	last_loc_note = last->loc;
      else if (piece_loc != NULL
	       && *piece_loc != NULL_RTX
	       && piece_bitpos == bitpos
	       && decl_piece_bitsize (piece: *piece_loc) == bitsize)
	last_loc_note = *decl_piece_varloc_ptr (piece: *piece_loc);
      else
	last_loc_note = NULL_RTX;
      /* If the current location is the same as the end of the list,
	 and either both or neither of the locations is uninitialized,
	 we have nothing to do.  */
      if (last_loc_note == NULL_RTX
	  || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
			    NOTE_VAR_LOCATION_LOC (loc_note)))
	  || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
	       != NOTE_VAR_LOCATION_STATUS (loc_note))
	      && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
		   == VAR_INIT_STATUS_UNINITIALIZED)
		  || (NOTE_VAR_LOCATION_STATUS (loc_note)
		      == VAR_INIT_STATUS_UNINITIALIZED))))
	{
	  /* Add LOC to the end of list and update LAST.  If the last
	     element of the list has been removed above, reuse its
	     memory for the new node, otherwise allocate a new one.  */
	  if (unused)
	    {
	      loc = unused;
	      memset (s: loc, c: '\0', n: sizeof (*loc));
	    }
	  else
	    loc = ggc_cleared_alloc<var_loc_node> ();
	  if (bitsize == -1 || piece_loc == NULL)
	    loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
	  else
	    adjust_piece_list (dest: &loc->loc, src: &last->loc, inner: piece_loc,
			       bitpos, piece_bitpos, bitsize, loc_note);
	  last->next = loc;
	  /* Ensure TEMP->LAST will point either to the new last but one
	     element of the chain, or to the last element in it.  */
	  if (last != temp->last)
	    temp->last = last;
	}
      else if (unused)
	ggc_free (unused);
    }
  else
    {
      /* Empty list: this note becomes both first and last element.  */
      loc = ggc_cleared_alloc<var_loc_node> ();
      temp->first = loc;
      temp->last = loc;
      loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
    }
  return loc;
}
6601 | |
6602 | /* Keep track of the number of spaces used to indent the |
6603 | output of the debugging routines that print the structure of |
6604 | the DIE internal representation. */ |
6605 | static int print_indent; |
6606 | |
6607 | /* Indent the line the number of spaces given by print_indent. */ |
6608 | |
6609 | static inline void |
6610 | print_spaces (FILE *outfile) |
6611 | { |
6612 | fprintf (stream: outfile, format: "%*s" , print_indent, "" ); |
6613 | } |
6614 | |
6615 | /* Print a type signature in hex. */ |
6616 | |
6617 | static inline void |
6618 | print_signature (FILE *outfile, char *sig) |
6619 | { |
6620 | int i; |
6621 | |
6622 | for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++) |
6623 | fprintf (stream: outfile, format: "%02x" , sig[i] & 0xff); |
6624 | } |
6625 | |
6626 | static inline void |
6627 | print_discr_value (FILE *outfile, dw_discr_value *discr_value) |
6628 | { |
6629 | if (discr_value->pos) |
6630 | fprintf (stream: outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.sval); |
6631 | else |
6632 | fprintf (stream: outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.uval); |
6633 | } |
6634 | |
6635 | static void print_loc_descr (dw_loc_descr_ref, FILE *); |
6636 | |
6637 | /* Print the value associated to the VAL DWARF value node to OUTFILE. If |
6638 | RECURSE, output location descriptor operations. */ |
6639 | |
6640 | static void |
6641 | print_dw_val (dw_val_node *val, bool recurse, FILE *outfile) |
6642 | { |
6643 | switch (val->val_class) |
6644 | { |
6645 | case dw_val_class_addr: |
6646 | fprintf (stream: outfile, format: "address" ); |
6647 | break; |
6648 | case dw_val_class_offset: |
6649 | fprintf (stream: outfile, format: "offset" ); |
6650 | break; |
6651 | case dw_val_class_loc: |
6652 | fprintf (stream: outfile, format: "location descriptor" ); |
6653 | if (val->v.val_loc == NULL) |
6654 | fprintf (stream: outfile, format: " -> <null>" ); |
6655 | else if (recurse) |
6656 | { |
6657 | fprintf (stream: outfile, format: ":\n" ); |
6658 | print_indent += 4; |
6659 | print_loc_descr (val->v.val_loc, outfile); |
6660 | print_indent -= 4; |
6661 | } |
6662 | else |
6663 | { |
6664 | if (flag_dump_noaddr || flag_dump_unnumbered) |
6665 | fprintf (stream: outfile, format: " #" ); |
6666 | else |
6667 | fprintf (stream: outfile, format: " (%p)" , (void *) val->v.val_loc); |
6668 | } |
6669 | break; |
6670 | case dw_val_class_loc_list: |
6671 | fprintf (stream: outfile, format: "location list -> label:%s" , |
6672 | val->v.val_loc_list->ll_symbol); |
6673 | break; |
6674 | case dw_val_class_view_list: |
6675 | val = view_list_to_loc_list_val_node (val); |
6676 | fprintf (stream: outfile, format: "location list with views -> labels:%s and %s" , |
6677 | val->v.val_loc_list->ll_symbol, |
6678 | val->v.val_loc_list->vl_symbol); |
6679 | break; |
6680 | case dw_val_class_range_list: |
6681 | fprintf (stream: outfile, format: "range list" ); |
6682 | break; |
6683 | case dw_val_class_const: |
6684 | case dw_val_class_const_implicit: |
6685 | fprintf (stream: outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int); |
6686 | break; |
6687 | case dw_val_class_unsigned_const: |
6688 | case dw_val_class_unsigned_const_implicit: |
6689 | fprintf (stream: outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned); |
6690 | break; |
6691 | case dw_val_class_const_double: |
6692 | fprintf (stream: outfile, format: "constant (" HOST_WIDE_INT_PRINT_DEC"," \ |
6693 | HOST_WIDE_INT_PRINT_UNSIGNED")" , |
6694 | val->v.val_double.high, |
6695 | val->v.val_double.low); |
6696 | break; |
6697 | case dw_val_class_wide_int: |
6698 | { |
6699 | int i = val->v.val_wide->get_len (); |
6700 | fprintf (stream: outfile, format: "constant (" ); |
6701 | gcc_assert (i > 0); |
6702 | if (val->v.val_wide->elt (i: i - 1) == 0) |
6703 | fprintf (stream: outfile, format: "0x" ); |
6704 | fprintf (stream: outfile, HOST_WIDE_INT_PRINT_HEX, |
6705 | val->v.val_wide->elt (i: --i)); |
6706 | while (--i >= 0) |
6707 | fprintf (stream: outfile, HOST_WIDE_INT_PRINT_PADDED_HEX, |
6708 | val->v.val_wide->elt (i)); |
6709 | fprintf (stream: outfile, format: ")" ); |
6710 | break; |
6711 | } |
6712 | case dw_val_class_vec: |
6713 | fprintf (stream: outfile, format: "floating-point or vector constant" ); |
6714 | break; |
6715 | case dw_val_class_flag: |
6716 | fprintf (stream: outfile, format: "%u" , val->v.val_flag); |
6717 | break; |
6718 | case dw_val_class_die_ref: |
6719 | if (val->v.val_die_ref.die != NULL) |
6720 | { |
6721 | dw_die_ref die = val->v.val_die_ref.die; |
6722 | |
6723 | if (die->comdat_type_p) |
6724 | { |
6725 | fprintf (stream: outfile, format: "die -> signature: " ); |
6726 | print_signature (outfile, |
6727 | sig: die->die_id.die_type_node->signature); |
6728 | } |
6729 | else if (die->die_id.die_symbol) |
6730 | { |
6731 | fprintf (stream: outfile, format: "die -> label: %s" , die->die_id.die_symbol); |
6732 | if (die->with_offset) |
6733 | fprintf (stream: outfile, format: " + %ld" , die->die_offset); |
6734 | } |
6735 | else |
6736 | fprintf (stream: outfile, format: "die -> %ld" , die->die_offset); |
6737 | if (flag_dump_noaddr || flag_dump_unnumbered) |
6738 | fprintf (stream: outfile, format: " #" ); |
6739 | else |
6740 | fprintf (stream: outfile, format: " (%p)" , (void *) die); |
6741 | } |
6742 | else |
6743 | fprintf (stream: outfile, format: "die -> <null>" ); |
6744 | break; |
6745 | case dw_val_class_vms_delta: |
6746 | fprintf (stream: outfile, format: "delta: @slotcount(%s-%s)" , |
6747 | val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1); |
6748 | break; |
6749 | case dw_val_class_symview: |
6750 | fprintf (stream: outfile, format: "view: %s" , val->v.val_symbolic_view); |
6751 | break; |
6752 | case dw_val_class_lbl_id: |
6753 | case dw_val_class_lineptr: |
6754 | case dw_val_class_macptr: |
6755 | case dw_val_class_loclistsptr: |
6756 | case dw_val_class_high_pc: |
6757 | fprintf (stream: outfile, format: "label: %s" , val->v.val_lbl_id); |
6758 | break; |
6759 | case dw_val_class_str: |
6760 | if (val->v.val_str->str != NULL) |
6761 | fprintf (stream: outfile, format: "\"%s\"" , val->v.val_str->str); |
6762 | else |
6763 | fprintf (stream: outfile, format: "<null>" ); |
6764 | break; |
6765 | case dw_val_class_file: |
6766 | case dw_val_class_file_implicit: |
6767 | fprintf (stream: outfile, format: "\"%s\" (%d)" , val->v.val_file->filename, |
6768 | val->v.val_file->emitted_number); |
6769 | break; |
6770 | case dw_val_class_data8: |
6771 | { |
6772 | int i; |
6773 | |
6774 | for (i = 0; i < 8; i++) |
6775 | fprintf (stream: outfile, format: "%02x" , val->v.val_data8[i]); |
6776 | break; |
6777 | } |
6778 | case dw_val_class_discr_value: |
6779 | print_discr_value (outfile, discr_value: &val->v.val_discr_value); |
6780 | break; |
6781 | case dw_val_class_discr_list: |
6782 | for (dw_discr_list_ref node = val->v.val_discr_list; |
6783 | node != NULL; |
6784 | node = node->dw_discr_next) |
6785 | { |
6786 | if (node->dw_discr_range) |
6787 | { |
6788 | fprintf (stream: outfile, format: " .. " ); |
6789 | print_discr_value (outfile, discr_value: &node->dw_discr_lower_bound); |
6790 | print_discr_value (outfile, discr_value: &node->dw_discr_upper_bound); |
6791 | } |
6792 | else |
6793 | print_discr_value (outfile, discr_value: &node->dw_discr_lower_bound); |
6794 | |
6795 | if (node->dw_discr_next != NULL) |
6796 | fprintf (stream: outfile, format: " | " ); |
6797 | } |
6798 | default: |
6799 | break; |
6800 | } |
6801 | } |
6802 | |
6803 | /* Likewise, for a DIE attribute. */ |
6804 | |
6805 | static void |
6806 | print_attribute (dw_attr_node *a, bool recurse, FILE *outfile) |
6807 | { |
6808 | print_dw_val (val: &a->dw_attr_val, recurse, outfile); |
6809 | } |
6810 | |
6811 | |
6812 | /* Print the list of operands in the LOC location description to OUTFILE. This |
6813 | routine is a debugging aid only. */ |
6814 | |
6815 | static void |
6816 | print_loc_descr (dw_loc_descr_ref loc, FILE *outfile) |
6817 | { |
6818 | dw_loc_descr_ref l = loc; |
6819 | |
6820 | if (loc == NULL) |
6821 | { |
6822 | print_spaces (outfile); |
6823 | fprintf (stream: outfile, format: "<null>\n" ); |
6824 | return; |
6825 | } |
6826 | |
6827 | for (l = loc; l != NULL; l = l->dw_loc_next) |
6828 | { |
6829 | print_spaces (outfile); |
6830 | if (flag_dump_noaddr || flag_dump_unnumbered) |
6831 | fprintf (stream: outfile, format: "#" ); |
6832 | else |
6833 | fprintf (stream: outfile, format: "(%p)" , (void *) l); |
6834 | fprintf (stream: outfile, format: " %s" , |
6835 | dwarf_stack_op_name (op: l->dw_loc_opc)); |
6836 | if (l->dw_loc_oprnd1.val_class != dw_val_class_none) |
6837 | { |
6838 | fprintf (stream: outfile, format: " " ); |
6839 | print_dw_val (val: &l->dw_loc_oprnd1, recurse: false, outfile); |
6840 | } |
6841 | if (l->dw_loc_oprnd2.val_class != dw_val_class_none) |
6842 | { |
6843 | fprintf (stream: outfile, format: ", " ); |
6844 | print_dw_val (val: &l->dw_loc_oprnd2, recurse: false, outfile); |
6845 | } |
6846 | fprintf (stream: outfile, format: "\n" ); |
6847 | } |
6848 | } |
6849 | |
6850 | /* Print the information associated with a given DIE, and its children. |
6851 | This routine is a debugging aid only. */ |
6852 | |
static void
print_die (dw_die_ref die, FILE *outfile)
{
  dw_attr_node *a;
  dw_die_ref c;
  unsigned ix;

  /* Header line: offset, tag, and (unless suppressed) the pointer.  */
  print_spaces (outfile);
  fprintf (stream: outfile, format: "DIE %4ld: %s " ,
	   die->die_offset, dwarf_tag_name (tag: die->die_tag));
  if (flag_dump_noaddr || flag_dump_unnumbered)
    fprintf (stream: outfile, format: "#\n" );
  else
    fprintf (stream: outfile, format: "(%p)\n" , (void*) die);
  print_spaces (outfile);
  fprintf (stream: outfile, format: " abbrev id: %lu" , die->die_abbrev);
  fprintf (stream: outfile, format: " offset: %ld" , die->die_offset);
  fprintf (stream: outfile, format: " mark: %d\n" , die->die_mark);

  /* Comdat type units carry a signature instead of a symbol.  */
  if (die->comdat_type_p)
    {
      print_spaces (outfile);
      fprintf (stream: outfile, format: " signature: " );
      print_signature (outfile, sig: die->die_id.die_type_node->signature);
      fprintf (stream: outfile, format: "\n" );
    }

  /* One line per attribute, recursing into location descriptors.  */
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    {
      print_spaces (outfile);
      fprintf (stream: outfile, format: " %s: " , dwarf_attr_name (attr: a->dw_attr));

      print_attribute (a, recurse: true, outfile);
      fprintf (stream: outfile, format: "\n" );
    }

  /* Recurse into children with increased indentation.  */
  if (die->die_child != NULL)
    {
      print_indent += 4;
      FOR_EACH_CHILD (die, c, print_die (c, outfile));
      print_indent -= 4;
    }
  /* Separate top-level DIEs by a blank line.  */
  if (print_indent == 0)
    fprintf (stream: outfile, format: "\n" );
}
6898 | |
6899 | /* Print the list of operations in the LOC location description. */ |
6900 | |
6901 | DEBUG_FUNCTION void |
6902 | debug_dwarf_loc_descr (dw_loc_descr_ref loc) |
6903 | { |
6904 | print_loc_descr (loc, stderr); |
6905 | } |
6906 | |
6907 | /* Print the information collected for a given DIE. */ |
6908 | |
6909 | DEBUG_FUNCTION void |
6910 | debug_dwarf_die (dw_die_ref die) |
6911 | { |
6912 | print_die (die, stderr); |
6913 | } |
6914 | |
6915 | DEBUG_FUNCTION void |
6916 | debug (die_struct &ref) |
6917 | { |
6918 | print_die (die: &ref, stderr); |
6919 | } |
6920 | |
6921 | DEBUG_FUNCTION void |
6922 | debug (die_struct *ptr) |
6923 | { |
6924 | if (ptr) |
6925 | debug (ref&: *ptr); |
6926 | else |
6927 | fprintf (stderr, format: "<nil>\n" ); |
6928 | } |
6929 | |
6930 | |
6931 | /* Print all DWARF information collected for the compilation unit. |
6932 | This routine is a debugging aid only. */ |
6933 | |
6934 | DEBUG_FUNCTION void |
6935 | debug_dwarf (void) |
6936 | { |
6937 | print_indent = 0; |
6938 | print_die (die: comp_unit_die (), stderr); |
6939 | } |
6940 | |
6941 | /* Verify the DIE tree structure. */ |
6942 | |
DEBUG_FUNCTION void
verify_die (dw_die_ref die)
{
  /* Marks must be clear on entry; they are used below as a cycle
     detector and are reset to zero again before returning.  */
  gcc_assert (!die->die_mark);
  if (die->die_parent == NULL
      && die->die_sib == NULL)
    return;
  /* Verify the die_sib list is cyclic.  */
  dw_die_ref x = die;
  do
    {
      x->die_mark = 1;
      x = x->die_sib;
    }
  while (x && !x->die_mark);
  /* If the walk ended anywhere but the starting DIE, the sibling ring
     is broken (NULL link or a lasso rather than a cycle).  */
  gcc_assert (x == die);
  x = die;
  do
    {
      /* Verify all dies have the same parent.  */
      gcc_assert (x->die_parent == die->die_parent);
      if (x->die_child)
	{
	  /* Verify the child has the proper parent and recurse.  */
	  gcc_assert (x->die_child->die_parent == x);
	  verify_die (die: x->die_child);
	}
      /* Clear the mark set in the first pass.  */
      x->die_mark = 0;
      x = x->die_sib;
    }
  while (x && x->die_mark);
}
6975 | |
6976 | /* Sanity checks on DIEs. */ |
6977 | |
static void
check_die (dw_die_ref die)
{
  unsigned ix;
  dw_attr_node *a;
  bool inline_found = false;
  /* Counters for attributes that must appear at most once on a DIE.  */
  int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
  int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    {
      switch (a->dw_attr)
	{
	case DW_AT_inline:
	  /* Only a nonzero DW_AT_inline value marks this DIE as the
	     root of an abstract instance tree.  */
	  if (a->dw_attr_val.v.val_unsigned)
	    inline_found = true;
	  break;
	case DW_AT_location:
	  ++n_location;
	  break;
	case DW_AT_low_pc:
	  ++n_low_pc;
	  break;
	case DW_AT_high_pc:
	  ++n_high_pc;
	  break;
	case DW_AT_artificial:
	  ++n_artificial;
	  break;
	case DW_AT_decl_column:
	  ++n_decl_column;
	  break;
	case DW_AT_decl_line:
	  ++n_decl_line;
	  break;
	case DW_AT_decl_file:
	  ++n_decl_file;
	  break;
	default:
	  break;
	}
    }
  /* Any duplicate among the counted attributes is a bug in the DIE
     construction; dump the offender and abort.  */
  if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
      || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
    {
      fprintf (stderr, format: "Duplicate attributes in DIE:\n" );
      debug_dwarf_die (die);
      gcc_unreachable ();
    }
  if (inline_found)
    {
      /* A debugging information entry that is a member of an abstract
	 instance tree [that has DW_AT_inline] should not contain any
	 attributes which describe aspects of the subroutine which vary
	 between distinct inlined expansions or distinct out-of-line
	 expansions.  */
      FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
	gcc_assert (a->dw_attr != DW_AT_low_pc
		    && a->dw_attr != DW_AT_high_pc
		    && a->dw_attr != DW_AT_location
		    && a->dw_attr != DW_AT_frame_base
		    && a->dw_attr != DW_AT_call_all_calls
		    && a->dw_attr != DW_AT_GNU_all_call_sites);
    }
}
7042 | |
7043 | #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx) |
7044 | #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx) |
7045 | #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx) |
7046 | |
7047 | /* Calculate the checksum of a location expression. */ |
7048 | |
7049 | static inline void |
7050 | loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx) |
7051 | { |
7052 | int tem; |
7053 | inchash::hash hstate; |
7054 | hashval_t hash; |
7055 | |
7056 | tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc); |
7057 | CHECKSUM (tem); |
7058 | hash_loc_operands (loc, hstate); |
7059 | hash = hstate.end(); |
7060 | CHECKSUM (hash); |
7061 | } |
7062 | |
7063 | /* Calculate the checksum of an attribute. */ |
7064 | |
static void
attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
{
  dw_loc_descr_ref loc;
  rtx r;

  /* Always checksum the attribute code itself.  */
  CHECKSUM (at->dw_attr);

  /* We don't care that this was compiled with a different compiler
     snapshot; if the output is the same, that's what matters.  */
  if (at->dw_attr == DW_AT_producer)
    return;

  /* Checksum the attribute's value according to its class.  */
  switch (AT_class (a: at))
    {
    case dw_val_class_const:
    case dw_val_class_const_implicit:
      CHECKSUM (at->dw_attr_val.v.val_int);
      break;
    case dw_val_class_unsigned_const:
    case dw_val_class_unsigned_const_implicit:
      CHECKSUM (at->dw_attr_val.v.val_unsigned);
      break;
    case dw_val_class_const_double:
      CHECKSUM (at->dw_attr_val.v.val_double);
      break;
    case dw_val_class_wide_int:
      CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
		      get_full_len (*at->dw_attr_val.v.val_wide)
		      * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
      break;
    case dw_val_class_vec:
      CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
		      (at->dw_attr_val.v.val_vec.length
		       * at->dw_attr_val.v.val_vec.elt_size));
      break;
    case dw_val_class_flag:
      CHECKSUM (at->dw_attr_val.v.val_flag);
      break;
    case dw_val_class_str:
      CHECKSUM_STRING (AT_string (at));
      break;

    case dw_val_class_addr:
      /* Checksum the symbol's name rather than its (unstable) address.  */
      r = AT_addr (a: at);
      gcc_assert (GET_CODE (r) == SYMBOL_REF);
      CHECKSUM_STRING (XSTR (r, 0));
      break;

    case dw_val_class_offset:
      CHECKSUM (at->dw_attr_val.v.val_offset);
      break;

    case dw_val_class_loc:
      /* Checksum each operation of the location expression list.  */
      for (loc = AT_loc (a: at); loc; loc = loc->dw_loc_next)
	loc_checksum (loc, ctx);
      break;

    case dw_val_class_die_ref:
      /* Recurse into the referenced DIE; MARK guards against cycles.  */
      die_checksum (AT_ref (a: at), ctx, mark);
      break;

    case dw_val_class_fde_ref:
    case dw_val_class_vms_delta:
    case dw_val_class_symview:
    case dw_val_class_lbl_id:
    case dw_val_class_lineptr:
    case dw_val_class_macptr:
    case dw_val_class_loclistsptr:
    case dw_val_class_high_pc:
      /* Label/section-offset classes contribute nothing beyond the
	 attribute code checksummed above.  */
      break;

    case dw_val_class_file:
    case dw_val_class_file_implicit:
      CHECKSUM_STRING (AT_file (at)->filename);
      break;

    case dw_val_class_data8:
      CHECKSUM (at->dw_attr_val.v.val_data8);
      break;

    default:
      break;
    }
}
7150 | |
7151 | /* Calculate the checksum of a DIE. */ |
7152 | |
static void
die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
{
  dw_die_ref c;
  dw_attr_node *a;
  unsigned ix;

  /* To avoid infinite recursion.  */
  if (die->die_mark)
    {
      /* Already visited: checksum the back-reference number assigned
	 below instead of descending again.  */
      CHECKSUM (die->die_mark);
      return;
    }
  /* Number this DIE so later references to it checksum identically.  */
  die->die_mark = ++(*mark);

  CHECKSUM (die->die_tag);

  /* Checksum every attribute, then every child, in order.  */
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    attr_checksum (at: a, ctx, mark);

  FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
}
7175 | |
7176 | #undef CHECKSUM |
7177 | #undef CHECKSUM_BLOCK |
7178 | #undef CHECKSUM_STRING |
7179 | |
7180 | /* For DWARF-4 types, include the trailing NULL when checksumming strings. */ |
7181 | #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx) |
7182 | #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx) |
7183 | #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx) |
7184 | #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx) |
7185 | #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx) |
7186 | #define CHECKSUM_ATTR(FOO) \ |
7187 | if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark) |
7188 | |
7189 | /* Calculate the checksum of a number in signed LEB128 format. */ |
7190 | |
7191 | static void |
7192 | checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx) |
7193 | { |
7194 | unsigned char byte; |
7195 | bool more; |
7196 | |
7197 | while (1) |
7198 | { |
7199 | byte = (value & 0x7f); |
7200 | value >>= 7; |
7201 | more = !((value == 0 && (byte & 0x40) == 0) |
7202 | || (value == -1 && (byte & 0x40) != 0)); |
7203 | if (more) |
7204 | byte |= 0x80; |
7205 | CHECKSUM (byte); |
7206 | if (!more) |
7207 | break; |
7208 | } |
7209 | } |
7210 | |
7211 | /* Calculate the checksum of a number in unsigned LEB128 format. */ |
7212 | |
7213 | static void |
7214 | checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx) |
7215 | { |
7216 | while (1) |
7217 | { |
7218 | unsigned char byte = (value & 0x7f); |
7219 | value >>= 7; |
7220 | if (value != 0) |
7221 | /* More bytes to follow. */ |
7222 | byte |= 0x80; |
7223 | CHECKSUM (byte); |
7224 | if (value == 0) |
7225 | break; |
7226 | } |
7227 | } |
7228 | |
7229 | /* Checksum the context of the DIE. This adds the names of any |
7230 | surrounding namespaces or structures to the checksum. */ |
7231 | |
static void
checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
{
  const char *name;
  dw_die_ref spec;
  int tag = die->die_tag;

  /* Only namespaces and class-like containers contribute context.  */
  if (tag != DW_TAG_namespace
      && tag != DW_TAG_structure_type
      && tag != DW_TAG_class_type)
    return;

  /* Fetch the name before possibly redirecting to the specification
     DIE below, which may not carry DW_AT_name itself.  */
  name = get_AT_string (die, attr_kind: DW_AT_name);

  /* For an out-of-class definition, walk up from the declaration's
     context instead of the definition's.  */
  spec = get_AT_ref (die, attr_kind: DW_AT_specification);
  if (spec != NULL)
    die = spec;

  /* Recurse first so the outermost context is checksummed first.  */
  if (die->die_parent != NULL)
    checksum_die_context (die: die->die_parent, ctx);

  /* 'C' marks a context entry in the checksum stream.  */
  CHECKSUM_ULEB128 ('C');
  CHECKSUM_ULEB128 (tag);
  if (name != NULL)
    CHECKSUM_STRING (name);
}
7258 | |
7259 | /* Calculate the checksum of a location expression. */ |
7260 | |
static inline void
loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
{
  /* Special case for lone DW_OP_plus_uconst: checksum as if the location
     were emitted as a DW_FORM_sdata instead of a location expression.  */
  if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
    {
      CHECKSUM_ULEB128 (DW_FORM_sdata);
      CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
      return;
    }

  /* Otherwise, just checksum the raw location expression.  */
  while (loc != NULL)
    {
      inchash::hash hstate;
      hashval_t hash;

      /* Per operation: the dtprel flag, the opcode, and a hash folding
	 in both operands.  */
      CHECKSUM_ULEB128 (loc->dtprel);
      CHECKSUM_ULEB128 (loc->dw_loc_opc);
      hash_loc_operands (loc, hstate);
      hash = hstate.end ();
      CHECKSUM (hash);
      loc = loc->dw_loc_next;
    }
}
7287 | |
7288 | /* Calculate the checksum of an attribute. */ |
7289 | |
static void
attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
		       struct md5_ctx *ctx, int *mark)
{
  dw_loc_descr_ref loc;
  rtx r;

  if (AT_class (a: at) == dw_val_class_die_ref)
    {
      dw_die_ref target_die = AT_ref (a: at);

      /* For pointer and reference types, we checksum only the (qualified)
	 name of the target type (if there is a name).  For friend entries,
	 we checksum only the (qualified) name of the target type or function.
	 This allows the checksum to remain the same whether the target type
	 is complete or not.  */
      if ((at->dw_attr == DW_AT_type
	   && (tag == DW_TAG_pointer_type
	       || tag == DW_TAG_reference_type
	       || tag == DW_TAG_rvalue_reference_type
	       || tag == DW_TAG_ptr_to_member_type))
	  || (at->dw_attr == DW_AT_friend
	      && tag == DW_TAG_friend))
	{
	  dw_attr_node *name_attr = get_AT (die: target_die, attr_kind: DW_AT_name);

	  if (name_attr != NULL)
	    {
	      dw_die_ref decl = get_AT_ref (die: target_die, attr_kind: DW_AT_specification);

	      if (decl == NULL)
		decl = target_die;
	      /* 'N' marks a by-name reference; 'E' ends the context
		 list preceding the name.  */
	      CHECKSUM_ULEB128 ('N');
	      CHECKSUM_ULEB128 (at->dw_attr);
	      if (decl->die_parent != NULL)
		checksum_die_context (die: decl->die_parent, ctx);
	      CHECKSUM_ULEB128 ('E');
	      CHECKSUM_STRING (AT_string (name_attr));
	      return;
	    }
	}

      /* For all other references to another DIE, we check to see if the
	 target DIE has already been visited.  If it has, we emit a
	 backward reference; if not, we descend recursively.  */
      if (target_die->die_mark > 0)
	{
	  /* 'R' marks a back-reference to an already-visited DIE,
	     identified by its mark number.  */
	  CHECKSUM_ULEB128 ('R');
	  CHECKSUM_ULEB128 (at->dw_attr);
	  CHECKSUM_ULEB128 (target_die->die_mark);
	}
      else
	{
	  dw_die_ref decl = get_AT_ref (die: target_die, attr_kind: DW_AT_specification);

	  if (decl == NULL)
	    decl = target_die;
	  /* Number the target and checksum it in full ('T' marker).  */
	  target_die->die_mark = ++(*mark);
	  CHECKSUM_ULEB128 ('T');
	  CHECKSUM_ULEB128 (at->dw_attr);
	  if (decl->die_parent != NULL)
	    checksum_die_context (die: decl->die_parent, ctx);
	  die_checksum_ordered (target_die, ctx, mark);
	}
      return;
    }

  /* 'A' marks an ordinary (non-reference) attribute, followed by the
     form used to encode its value and the value itself.  */
  CHECKSUM_ULEB128 ('A');
  CHECKSUM_ULEB128 (at->dw_attr);

  switch (AT_class (a: at))
    {
    case dw_val_class_const:
    case dw_val_class_const_implicit:
      CHECKSUM_ULEB128 (DW_FORM_sdata);
      CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
      break;

    case dw_val_class_unsigned_const:
    case dw_val_class_unsigned_const_implicit:
      CHECKSUM_ULEB128 (DW_FORM_sdata);
      CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
      break;

    case dw_val_class_const_double:
      CHECKSUM_ULEB128 (DW_FORM_block);
      CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
      CHECKSUM (at->dw_attr_val.v.val_double);
      break;

    case dw_val_class_wide_int:
      CHECKSUM_ULEB128 (DW_FORM_block);
      CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
			* HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
      CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
		      get_full_len (*at->dw_attr_val.v.val_wide)
		      * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
      break;

    case dw_val_class_vec:
      CHECKSUM_ULEB128 (DW_FORM_block);
      CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
			* at->dw_attr_val.v.val_vec.elt_size);
      CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
		      (at->dw_attr_val.v.val_vec.length
		       * at->dw_attr_val.v.val_vec.elt_size));
      break;

    case dw_val_class_flag:
      CHECKSUM_ULEB128 (DW_FORM_flag);
      CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
      break;

    case dw_val_class_str:
      CHECKSUM_ULEB128 (DW_FORM_string);
      CHECKSUM_STRING (AT_string (at));
      break;

    case dw_val_class_addr:
      /* Checksum the symbol's name rather than its address.  */
      r = AT_addr (a: at);
      gcc_assert (GET_CODE (r) == SYMBOL_REF);
      CHECKSUM_ULEB128 (DW_FORM_string);
      CHECKSUM_STRING (XSTR (r, 0));
      break;

    case dw_val_class_offset:
      CHECKSUM_ULEB128 (DW_FORM_sdata);
      CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
      break;

    case dw_val_class_loc:
      for (loc = AT_loc (a: at); loc; loc = loc->dw_loc_next)
	loc_checksum_ordered (loc, ctx);
      break;

    case dw_val_class_fde_ref:
    case dw_val_class_symview:
    case dw_val_class_lbl_id:
    case dw_val_class_lineptr:
    case dw_val_class_macptr:
    case dw_val_class_loclistsptr:
    case dw_val_class_high_pc:
      /* Label/offset classes contribute no value bytes.  */
      break;

    case dw_val_class_file:
    case dw_val_class_file_implicit:
      CHECKSUM_ULEB128 (DW_FORM_string);
      CHECKSUM_STRING (AT_file (at)->filename);
      break;

    case dw_val_class_data8:
      CHECKSUM (at->dw_attr_val.v.val_data8);
      break;

    default:
      break;
    }
}
7448 | |
/* Attributes gathered from a DIE (and from its DW_AT_specification DIE,
   if any) for die_checksum_ordered.  Each slot is either NULL or points
   at the attribute node for the corresponding DW_AT_*; the fixed slot
   order lets the checksum be independent of attribute order on the DIE.  */
struct checksum_attributes
{
  dw_attr_node *at_name;
  dw_attr_node *at_type;
  dw_attr_node *at_friend;
  dw_attr_node *at_accessibility;
  dw_attr_node *at_address_class;
  dw_attr_node *at_alignment;
  dw_attr_node *at_allocated;
  dw_attr_node *at_artificial;
  dw_attr_node *at_associated;
  dw_attr_node *at_binary_scale;
  dw_attr_node *at_bit_offset;
  dw_attr_node *at_bit_size;
  dw_attr_node *at_bit_stride;
  dw_attr_node *at_byte_size;
  dw_attr_node *at_byte_stride;
  dw_attr_node *at_const_value;
  dw_attr_node *at_containing_type;
  dw_attr_node *at_count;
  dw_attr_node *at_data_location;
  dw_attr_node *at_data_member_location;
  dw_attr_node *at_decimal_scale;
  dw_attr_node *at_decimal_sign;
  dw_attr_node *at_default_value;
  dw_attr_node *at_digit_count;
  dw_attr_node *at_discr;
  dw_attr_node *at_discr_list;
  dw_attr_node *at_discr_value;
  dw_attr_node *at_encoding;
  dw_attr_node *at_endianity;
  dw_attr_node *at_explicit;
  dw_attr_node *at_is_optional;
  dw_attr_node *at_location;
  dw_attr_node *at_lower_bound;
  dw_attr_node *at_mutable;
  dw_attr_node *at_ordering;
  dw_attr_node *at_picture_string;
  dw_attr_node *at_prototyped;
  dw_attr_node *at_small;
  dw_attr_node *at_segment;
  dw_attr_node *at_string_length;
  dw_attr_node *at_string_length_bit_size;
  dw_attr_node *at_string_length_byte_size;
  dw_attr_node *at_threads_scaled;
  dw_attr_node *at_upper_bound;
  dw_attr_node *at_use_location;
  dw_attr_node *at_use_UTF8;
  dw_attr_node *at_variable_parameter;
  dw_attr_node *at_virtuality;
  dw_attr_node *at_visibility;
  dw_attr_node *at_vtable_elem_location;
};
7502 | |
7503 | /* Collect the attributes that we will want to use for the checksum. */ |
7504 | |
static void
collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
{
  dw_attr_node *a;
  unsigned ix;

  /* Map each attribute of interest to its slot in *ATTRS; attributes
     not listed below are ignored.  A later call (or later duplicate)
     overwrites an earlier slot value.  */
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    {
      switch (a->dw_attr)
	{
	case DW_AT_name:
	  attrs->at_name = a;
	  break;
	case DW_AT_type:
	  attrs->at_type = a;
	  break;
	case DW_AT_friend:
	  attrs->at_friend = a;
	  break;
	case DW_AT_accessibility:
	  attrs->at_accessibility = a;
	  break;
	case DW_AT_address_class:
	  attrs->at_address_class = a;
	  break;
	case DW_AT_alignment:
	  attrs->at_alignment = a;
	  break;
	case DW_AT_allocated:
	  attrs->at_allocated = a;
	  break;
	case DW_AT_artificial:
	  attrs->at_artificial = a;
	  break;
	case DW_AT_associated:
	  attrs->at_associated = a;
	  break;
	case DW_AT_binary_scale:
	  attrs->at_binary_scale = a;
	  break;
	case DW_AT_bit_offset:
	  attrs->at_bit_offset = a;
	  break;
	case DW_AT_bit_size:
	  attrs->at_bit_size = a;
	  break;
	case DW_AT_bit_stride:
	  attrs->at_bit_stride = a;
	  break;
	case DW_AT_byte_size:
	  attrs->at_byte_size = a;
	  break;
	case DW_AT_byte_stride:
	  attrs->at_byte_stride = a;
	  break;
	case DW_AT_const_value:
	  attrs->at_const_value = a;
	  break;
	case DW_AT_containing_type:
	  attrs->at_containing_type = a;
	  break;
	case DW_AT_count:
	  attrs->at_count = a;
	  break;
	case DW_AT_data_location:
	  attrs->at_data_location = a;
	  break;
	case DW_AT_data_member_location:
	  attrs->at_data_member_location = a;
	  break;
	case DW_AT_decimal_scale:
	  attrs->at_decimal_scale = a;
	  break;
	case DW_AT_decimal_sign:
	  attrs->at_decimal_sign = a;
	  break;
	case DW_AT_default_value:
	  attrs->at_default_value = a;
	  break;
	case DW_AT_digit_count:
	  attrs->at_digit_count = a;
	  break;
	case DW_AT_discr:
	  attrs->at_discr = a;
	  break;
	case DW_AT_discr_list:
	  attrs->at_discr_list = a;
	  break;
	case DW_AT_discr_value:
	  attrs->at_discr_value = a;
	  break;
	case DW_AT_encoding:
	  attrs->at_encoding = a;
	  break;
	case DW_AT_endianity:
	  attrs->at_endianity = a;
	  break;
	case DW_AT_explicit:
	  attrs->at_explicit = a;
	  break;
	case DW_AT_is_optional:
	  attrs->at_is_optional = a;
	  break;
	case DW_AT_location:
	  attrs->at_location = a;
	  break;
	case DW_AT_lower_bound:
	  attrs->at_lower_bound = a;
	  break;
	case DW_AT_mutable:
	  attrs->at_mutable = a;
	  break;
	case DW_AT_ordering:
	  attrs->at_ordering = a;
	  break;
	case DW_AT_picture_string:
	  attrs->at_picture_string = a;
	  break;
	case DW_AT_prototyped:
	  attrs->at_prototyped = a;
	  break;
	case DW_AT_small:
	  attrs->at_small = a;
	  break;
	case DW_AT_segment:
	  attrs->at_segment = a;
	  break;
	case DW_AT_string_length:
	  attrs->at_string_length = a;
	  break;
	case DW_AT_string_length_bit_size:
	  attrs->at_string_length_bit_size = a;
	  break;
	case DW_AT_string_length_byte_size:
	  attrs->at_string_length_byte_size = a;
	  break;
	case DW_AT_threads_scaled:
	  attrs->at_threads_scaled = a;
	  break;
	case DW_AT_upper_bound:
	  attrs->at_upper_bound = a;
	  break;
	case DW_AT_use_location:
	  attrs->at_use_location = a;
	  break;
	case DW_AT_use_UTF8:
	  attrs->at_use_UTF8 = a;
	  break;
	case DW_AT_variable_parameter:
	  attrs->at_variable_parameter = a;
	  break;
	case DW_AT_virtuality:
	  attrs->at_virtuality = a;
	  break;
	case DW_AT_visibility:
	  attrs->at_visibility = a;
	  break;
	case DW_AT_vtable_elem_location:
	  attrs->at_vtable_elem_location = a;
	  break;
	default:
	  break;
	}
    }
}
7670 | |
7671 | /* Calculate the checksum of a DIE, using an ordered subset of attributes. */ |
7672 | |
static void
die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
{
  dw_die_ref c;
  dw_die_ref decl;
  struct checksum_attributes attrs;

  /* 'D' marks the start of a DIE in the checksum stream.  */
  CHECKSUM_ULEB128 ('D');
  CHECKSUM_ULEB128 (die->die_tag);

  memset (s: &attrs, c: 0, n: sizeof (attrs));

  /* Collect attributes from the declaration DIE first, so that the
     definition's own attributes overwrite any duplicates.  */
  decl = get_AT_ref (die, attr_kind: DW_AT_specification);
  if (decl != NULL)
    collect_checksum_attributes (attrs: &attrs, die: decl);
  collect_checksum_attributes (attrs: &attrs, die);

  /* Checksum the selected attributes in a fixed canonical order,
     independent of their order on the DIE (CHECKSUM_ATTR skips
     NULL slots).  */
  CHECKSUM_ATTR (attrs.at_name);
  CHECKSUM_ATTR (attrs.at_accessibility);
  CHECKSUM_ATTR (attrs.at_address_class);
  CHECKSUM_ATTR (attrs.at_allocated);
  CHECKSUM_ATTR (attrs.at_artificial);
  CHECKSUM_ATTR (attrs.at_associated);
  CHECKSUM_ATTR (attrs.at_binary_scale);
  CHECKSUM_ATTR (attrs.at_bit_offset);
  CHECKSUM_ATTR (attrs.at_bit_size);
  CHECKSUM_ATTR (attrs.at_bit_stride);
  CHECKSUM_ATTR (attrs.at_byte_size);
  CHECKSUM_ATTR (attrs.at_byte_stride);
  CHECKSUM_ATTR (attrs.at_const_value);
  CHECKSUM_ATTR (attrs.at_containing_type);
  CHECKSUM_ATTR (attrs.at_count);
  CHECKSUM_ATTR (attrs.at_data_location);
  CHECKSUM_ATTR (attrs.at_data_member_location);
  CHECKSUM_ATTR (attrs.at_decimal_scale);
  CHECKSUM_ATTR (attrs.at_decimal_sign);
  CHECKSUM_ATTR (attrs.at_default_value);
  CHECKSUM_ATTR (attrs.at_digit_count);
  CHECKSUM_ATTR (attrs.at_discr);
  CHECKSUM_ATTR (attrs.at_discr_list);
  CHECKSUM_ATTR (attrs.at_discr_value);
  CHECKSUM_ATTR (attrs.at_encoding);
  CHECKSUM_ATTR (attrs.at_endianity);
  CHECKSUM_ATTR (attrs.at_explicit);
  CHECKSUM_ATTR (attrs.at_is_optional);
  CHECKSUM_ATTR (attrs.at_location);
  CHECKSUM_ATTR (attrs.at_lower_bound);
  CHECKSUM_ATTR (attrs.at_mutable);
  CHECKSUM_ATTR (attrs.at_ordering);
  CHECKSUM_ATTR (attrs.at_picture_string);
  CHECKSUM_ATTR (attrs.at_prototyped);
  CHECKSUM_ATTR (attrs.at_small);
  CHECKSUM_ATTR (attrs.at_segment);
  CHECKSUM_ATTR (attrs.at_string_length);
  CHECKSUM_ATTR (attrs.at_string_length_bit_size);
  CHECKSUM_ATTR (attrs.at_string_length_byte_size);
  CHECKSUM_ATTR (attrs.at_threads_scaled);
  CHECKSUM_ATTR (attrs.at_upper_bound);
  CHECKSUM_ATTR (attrs.at_use_location);
  CHECKSUM_ATTR (attrs.at_use_UTF8);
  CHECKSUM_ATTR (attrs.at_variable_parameter);
  CHECKSUM_ATTR (attrs.at_virtuality);
  CHECKSUM_ATTR (attrs.at_visibility);
  CHECKSUM_ATTR (attrs.at_vtable_elem_location);
  CHECKSUM_ATTR (attrs.at_type);
  CHECKSUM_ATTR (attrs.at_friend);
  CHECKSUM_ATTR (attrs.at_alignment);

  /* Checksum the child DIEs.  */
  c = die->die_child;
  if (c) do {
    dw_attr_node *name_attr;

    /* die_child points at the last child; advancing through die_sib
       first walks the circular sibling list in child order.  */
    c = c->die_sib;
    name_attr = get_AT (die: c, attr_kind: DW_AT_name);
    if (is_template_instantiation (c))
      {
	/* Ignore instantiations of member type and function templates.  */
      }
    else if (name_attr != NULL
	     && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
      {
	/* Use a shallow checksum for named nested types and member
	   functions.  */
	CHECKSUM_ULEB128 ('S');
	CHECKSUM_ULEB128 (c->die_tag);
	CHECKSUM_STRING (AT_string (name_attr));
      }
    else
      {
	/* Use a deep checksum for other children.  */
	/* Mark this DIE so it gets processed when unmarking.  */
	if (c->die_mark == 0)
	  c->die_mark = -1;
	die_checksum_ordered (die: c, ctx, mark);
      }
  } while (c != die->die_child);

  /* Terminate the child list in the checksum stream.  */
  CHECKSUM_ULEB128 (0);
}
7773 | |
7774 | /* Add a type name and tag to a hash. */ |
static void
die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
{
  /* Checksum just the tag and name; this feeds the name-only ODR
     signature computed in generate_type_signature.  */
  CHECKSUM_ULEB128 (tag);
  CHECKSUM_STRING (name);
}
7781 | |
/* Undefine all of the DWARF-4 checksumming macros defined above.
   Note: this previously undefined a nonexistent CHECKSUM_LEB128
   (typo for CHECKSUM_SLEB128) and omitted CHECKSUM_BLOCK, leaving
   both macros defined for the rest of the file.  */
#undef CHECKSUM
#undef CHECKSUM_BLOCK
#undef CHECKSUM_STRING
#undef CHECKSUM_ATTR
#undef CHECKSUM_SLEB128
#undef CHECKSUM_ULEB128
7787 | |
7788 | /* Generate the type signature for DIE. This is computed by generating an |
7789 | MD5 checksum over the DIE's tag, its relevant attributes, and its |
7790 | children. Attributes that are references to other DIEs are processed |
7791 | by recursion, using the MARK field to prevent infinite recursion. |
7792 | If the DIE is nested inside a namespace or another type, we also |
7793 | need to include that context in the signature. The lower 64 bits |
7794 | of the resulting MD5 checksum comprise the signature. */ |
7795 | |
static void
generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
{
  int mark;
  const char *name;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  dw_die_ref decl;
  dw_die_ref parent;

  name = get_AT_string (die, attr_kind: DW_AT_name);
  decl = get_AT_ref (die, attr_kind: DW_AT_specification);
  parent = get_die_parent (die);

  /* First, compute a signature for just the type name (and its surrounding
     context, if any.  This is stored in the type unit DIE for link-time
     ODR (one-definition rule) checking.  */

  if (is_cxx () && name != NULL)
    {
      md5_init_ctx (ctx: &ctx);

      /* Checksum the names of surrounding namespaces and structures.  */
      if (parent != NULL)
	checksum_die_context (die: parent, ctx: &ctx);

      /* Checksum the current DIE.  */
      die_odr_checksum (tag: die->die_tag, name, ctx: &ctx);
      md5_finish_ctx (ctx: &ctx, resbuf: checksum);

      /* The ODR signature is the last 8 bytes of this name-only md5.  */
      add_AT_data8 (die: type_node->root_die, attr_kind: DW_AT_GNU_odr_signature, data8: &checksum[8]);
    }

  /* Next, compute the complete type signature.  */

  md5_init_ctx (ctx: &ctx);
  /* Mark the root DIE as number 1, so references back to it checksum
     consistently; die_checksum_ordered increments MARK from here.  */
  mark = 1;
  die->die_mark = mark;

  /* Checksum the names of surrounding namespaces and structures.  */
  if (parent != NULL)
    checksum_die_context (die: parent, ctx: &ctx);

  /* Checksum the DIE and its children.  */
  die_checksum_ordered (die, ctx: &ctx, mark: &mark);
  unmark_all_dies (die);
  md5_finish_ctx (ctx: &ctx, resbuf: checksum);

  /* Store the signature in the type node and link the type DIE and the
     type node together.  The signature is the trailing
     DWARF_TYPE_SIGNATURE_SIZE bytes of the md5 checksum.  */
  memcpy (dest: type_node->signature, src: &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
	  DWARF_TYPE_SIGNATURE_SIZE);
  die->comdat_type_p = true;
  die->die_id.die_type_node = type_node;
  type_node->type_die = die;

  /* If the DIE is a specification, link its declaration to the type node
     as well.  */
  if (decl != NULL)
    {
      decl->comdat_type_p = true;
      decl->die_id.die_type_node = type_node;
    }
}
7860 | |
7861 | /* Do the location expressions look same? */ |
7862 | static inline bool |
7863 | same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark) |
7864 | { |
7865 | return loc1->dw_loc_opc == loc2->dw_loc_opc |
7866 | && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark) |
7867 | && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark); |
7868 | } |
7869 | |
7870 | /* Do the values look the same? */ |
7871 | static bool |
7872 | same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark) |
7873 | { |
7874 | dw_loc_descr_ref loc1, loc2; |
7875 | rtx r1, r2; |
7876 | |
7877 | if (v1->val_class != v2->val_class) |
7878 | return false; |
7879 | |
7880 | switch (v1->val_class) |
7881 | { |
7882 | case dw_val_class_const: |
7883 | case dw_val_class_const_implicit: |
7884 | return v1->v.val_int == v2->v.val_int; |
7885 | case dw_val_class_unsigned_const: |
7886 | case dw_val_class_unsigned_const_implicit: |
7887 | return v1->v.val_unsigned == v2->v.val_unsigned; |
7888 | case dw_val_class_const_double: |
7889 | return v1->v.val_double.high == v2->v.val_double.high |
7890 | && v1->v.val_double.low == v2->v.val_double.low; |
7891 | case dw_val_class_wide_int: |
7892 | return *v1->v.val_wide == *v2->v.val_wide; |
7893 | case dw_val_class_vec: |
7894 | if (v1->v.val_vec.length != v2->v.val_vec.length |
7895 | || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size) |
7896 | return false; |
7897 | if (memcmp (s1: v1->v.val_vec.array, s2: v2->v.val_vec.array, |
7898 | n: v1->v.val_vec.length * v1->v.val_vec.elt_size)) |
7899 | return false; |
7900 | return true; |
7901 | case dw_val_class_flag: |
7902 | return v1->v.val_flag == v2->v.val_flag; |
7903 | case dw_val_class_str: |
7904 | return !strcmp (s1: v1->v.val_str->str, s2: v2->v.val_str->str); |
7905 | |
7906 | case dw_val_class_addr: |
7907 | r1 = v1->v.val_addr; |
7908 | r2 = v2->v.val_addr; |
7909 | if (GET_CODE (r1) != GET_CODE (r2)) |
7910 | return false; |
7911 | return !rtx_equal_p (r1, r2); |
7912 | |
7913 | case dw_val_class_offset: |
7914 | return v1->v.val_offset == v2->v.val_offset; |
7915 | |
7916 | case dw_val_class_loc: |
7917 | for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc; |
7918 | loc1 && loc2; |
7919 | loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next) |
7920 | if (!same_loc_p (loc1, loc2, mark)) |
7921 | return false; |
7922 | return !loc1 && !loc2; |
7923 | |
7924 | case dw_val_class_die_ref: |
7925 | return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark); |
7926 | |
7927 | case dw_val_class_symview: |
7928 | return strcmp (s1: v1->v.val_symbolic_view, s2: v2->v.val_symbolic_view) == 0; |
7929 | |
7930 | case dw_val_class_fde_ref: |
7931 | case dw_val_class_vms_delta: |
7932 | case dw_val_class_lbl_id: |
7933 | case dw_val_class_lineptr: |
7934 | case dw_val_class_macptr: |
7935 | case dw_val_class_loclistsptr: |
7936 | case dw_val_class_high_pc: |
7937 | return true; |
7938 | |
7939 | case dw_val_class_file: |
7940 | case dw_val_class_file_implicit: |
7941 | return v1->v.val_file == v2->v.val_file; |
7942 | |
7943 | case dw_val_class_data8: |
7944 | return !memcmp (s1: v1->v.val_data8, s2: v2->v.val_data8, n: 8); |
7945 | |
7946 | default: |
7947 | return true; |
7948 | } |
7949 | } |
7950 | |
7951 | /* Do the attributes look the same? */ |
7952 | |
7953 | static bool |
7954 | same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark) |
7955 | { |
7956 | if (at1->dw_attr != at2->dw_attr) |
7957 | return false; |
7958 | |
7959 | /* We don't care that this was compiled with a different compiler |
7960 | snapshot; if the output is the same, that's what matters. */ |
7961 | if (at1->dw_attr == DW_AT_producer) |
7962 | return true; |
7963 | |
7964 | return same_dw_val_p (v1: &at1->dw_attr_val, v2: &at2->dw_attr_val, mark); |
7965 | } |
7966 | |
7967 | /* Do the dies look the same? */ |
7968 | |
7969 | static bool |
7970 | same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark) |
7971 | { |
7972 | dw_die_ref c1, c2; |
7973 | dw_attr_node *a1; |
7974 | unsigned ix; |
7975 | |
7976 | /* To avoid infinite recursion. */ |
7977 | if (die1->die_mark) |
7978 | return die1->die_mark == die2->die_mark; |
7979 | die1->die_mark = die2->die_mark = ++(*mark); |
7980 | |
7981 | if (die1->die_tag != die2->die_tag) |
7982 | return false; |
7983 | |
7984 | if (vec_safe_length (v: die1->die_attr) != vec_safe_length (v: die2->die_attr)) |
7985 | return false; |
7986 | |
7987 | FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1) |
7988 | if (!same_attr_p (at1: a1, at2: &(*die2->die_attr)[ix], mark)) |
7989 | return false; |
7990 | |
7991 | c1 = die1->die_child; |
7992 | c2 = die2->die_child; |
7993 | if (! c1) |
7994 | { |
7995 | if (c2) |
7996 | return false; |
7997 | } |
7998 | else |
7999 | for (;;) |
8000 | { |
8001 | if (!same_die_p (die1: c1, die2: c2, mark)) |
8002 | return false; |
8003 | c1 = c1->die_sib; |
8004 | c2 = c2->die_sib; |
8005 | if (c1 == die1->die_child) |
8006 | { |
8007 | if (c2 == die2->die_child) |
8008 | break; |
8009 | else |
8010 | return false; |
8011 | } |
8012 | } |
8013 | |
8014 | return true; |
8015 | } |
8016 | |
/* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
   children, and set die_symbol.  */

static void
compute_comp_unit_symbol (dw_die_ref unit_die)
{
  const char *die_name = get_AT_string (die: unit_die, attr_kind: DW_AT_name);
  const char *base = die_name ? lbasename (die_name) : "anonymous" ;
  /* Room for the base name plus a dot, eight checksum hex digits, the
     optional 'g' prefix, and the terminator.  */
  char *name = XALLOCAVEC (char, strlen (base) + 64);
  char *p;
  int i, mark;
  unsigned char checksum[16];
  struct md5_ctx ctx;

  /* Compute the checksum of the DIE, then append part of it as hex digits to
     the base filename of the unit.  */

  md5_init_ctx (ctx: &ctx);
  mark = 0;
  die_checksum (die: unit_die, ctx: &ctx, mark: &mark);
  unmark_all_dies (unit_die);
  md5_finish_ctx (ctx: &ctx, resbuf: checksum);

  /* When we do this for comp_unit_die () we have a DW_AT_name that might
     not start with a letter but with anything valid for filenames and
     clean_symbol_name doesn't fix that up.  Prepend 'g' if the first
     character is not a letter.  */
  sprintf (s: name, format: "%s%s." , ISALPHA (*base) ? "" : "g" , base);
  clean_symbol_name (name);

  /* Append the first four checksum bytes as eight hex digits.  */
  p = name + strlen (s: name);
  for (i = 0; i < 4; i++)
    {
      sprintf (s: p, format: "%.2x" , checksum[i]);
      p += 2;
    }

  unit_die->die_id.die_symbol = xstrdup (name);
}
8056 | |
8057 | /* Returns true if DIE represents a type, in the sense of TYPE_P. */ |
8058 | |
8059 | static bool |
8060 | is_type_die (dw_die_ref die) |
8061 | { |
8062 | switch (die->die_tag) |
8063 | { |
8064 | case DW_TAG_array_type: |
8065 | case DW_TAG_class_type: |
8066 | case DW_TAG_interface_type: |
8067 | case DW_TAG_enumeration_type: |
8068 | case DW_TAG_pointer_type: |
8069 | case DW_TAG_reference_type: |
8070 | case DW_TAG_rvalue_reference_type: |
8071 | case DW_TAG_string_type: |
8072 | case DW_TAG_structure_type: |
8073 | case DW_TAG_subroutine_type: |
8074 | case DW_TAG_union_type: |
8075 | case DW_TAG_ptr_to_member_type: |
8076 | case DW_TAG_set_type: |
8077 | case DW_TAG_subrange_type: |
8078 | case DW_TAG_base_type: |
8079 | case DW_TAG_const_type: |
8080 | case DW_TAG_file_type: |
8081 | case DW_TAG_packed_type: |
8082 | case DW_TAG_volatile_type: |
8083 | case DW_TAG_typedef: |
8084 | return true; |
8085 | default: |
8086 | return false; |
8087 | } |
8088 | } |
8089 | |
8090 | /* Returns true iff C is a compile-unit DIE. */ |
8091 | |
8092 | static inline bool |
8093 | is_cu_die (dw_die_ref c) |
8094 | { |
8095 | return c && (c->die_tag == DW_TAG_compile_unit |
8096 | || c->die_tag == DW_TAG_skeleton_unit); |
8097 | } |
8098 | |
8099 | /* Returns true iff C is a unit DIE of some sort. */ |
8100 | |
8101 | static inline bool |
8102 | is_unit_die (dw_die_ref c) |
8103 | { |
8104 | return c && (c->die_tag == DW_TAG_compile_unit |
8105 | || c->die_tag == DW_TAG_partial_unit |
8106 | || c->die_tag == DW_TAG_type_unit |
8107 | || c->die_tag == DW_TAG_skeleton_unit); |
8108 | } |
8109 | |
8110 | /* Returns true iff C is a namespace DIE. */ |
8111 | |
8112 | static inline bool |
8113 | is_namespace_die (dw_die_ref c) |
8114 | { |
8115 | return c && c->die_tag == DW_TAG_namespace; |
8116 | } |
8117 | |
8118 | /* Return true if this DIE is a template parameter. */ |
8119 | |
8120 | static inline bool |
8121 | is_template_parameter (dw_die_ref die) |
8122 | { |
8123 | switch (die->die_tag) |
8124 | { |
8125 | case DW_TAG_template_type_param: |
8126 | case DW_TAG_template_value_param: |
8127 | case DW_TAG_GNU_template_template_param: |
8128 | case DW_TAG_GNU_template_parameter_pack: |
8129 | return true; |
8130 | default: |
8131 | return false; |
8132 | } |
8133 | } |
8134 | |
8135 | /* Return true if this DIE represents a template instantiation. */ |
8136 | |
8137 | static inline bool |
8138 | is_template_instantiation (dw_die_ref die) |
8139 | { |
8140 | dw_die_ref c; |
8141 | |
8142 | if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram) |
8143 | return false; |
8144 | FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true); |
8145 | return false; |
8146 | } |
8147 | |
/* Generate a fresh internal label name using PREFIX and the global
   label_num counter (incremented on each call).  The returned string is
   allocated with xstrdup.  */

static char *
gen_internal_sym (const char *prefix)
{
  char buf[MAX_ARTIFICIAL_LABEL_BYTES];

  ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
  return xstrdup (buf);
}
8156 | |
8157 | /* Return true if this DIE is a declaration. */ |
8158 | |
8159 | static bool |
8160 | is_declaration_die (dw_die_ref die) |
8161 | { |
8162 | dw_attr_node *a; |
8163 | unsigned ix; |
8164 | |
8165 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
8166 | if (a->dw_attr == DW_AT_declaration) |
8167 | return true; |
8168 | |
8169 | return false; |
8170 | } |
8171 | |
8172 | /* Return true if this DIE is nested inside a subprogram. */ |
8173 | |
8174 | static bool |
8175 | is_nested_in_subprogram (dw_die_ref die) |
8176 | { |
8177 | dw_die_ref decl = get_AT_ref (die, attr_kind: DW_AT_specification); |
8178 | |
8179 | if (decl == NULL) |
8180 | decl = die; |
8181 | return local_scope_p (decl); |
8182 | } |
8183 | |
8184 | /* Return true if this DIE contains a defining declaration of a |
8185 | subprogram. */ |
8186 | |
8187 | static bool |
8188 | contains_subprogram_definition (dw_die_ref die) |
8189 | { |
8190 | dw_die_ref c; |
8191 | |
8192 | if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die)) |
8193 | return true; |
8194 | FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1); |
8195 | return false; |
8196 | } |
8197 | |
/* Return true if this is a type DIE that should be moved to a
   COMDAT .debug_types section or .debug_info section with DW_UT_*type
   unit type.  */

static bool
should_move_die_to_comdat (dw_die_ref die)
{
  switch (die->die_tag)
    {
    /* Only named aggregate/enumeration types are candidates for their
       own type units.  */
    case DW_TAG_class_type:
    case DW_TAG_structure_type:
    case DW_TAG_enumeration_type:
    case DW_TAG_union_type:
      /* Don't move declarations, inlined instances, types nested in a
         subprogram, or types that contain subprogram definitions.  */
      if (is_declaration_die (die)
          || get_AT (die, attr_kind: DW_AT_abstract_origin)
          || is_nested_in_subprogram (die)
          || contains_subprogram_definition (die))
        return false;
      if (die->die_tag != DW_TAG_enumeration_type)
        {
          /* Don't move non-constant size aggregates: a missing or
             non-constant DW_AT_byte_size means the layout depends on
             context outside the would-be type unit.  */
          dw_attr_node *sz = get_AT (die, attr_kind: DW_AT_byte_size);
          if (sz == NULL
              || (AT_class (a: sz) != dw_val_class_unsigned_const
                  && AT_class (a: sz) != dw_val_class_unsigned_const_implicit))
            return false;
        }
      return true;
    /* All other type tags (listed explicitly for documentation) stay in
       the main compile unit.  */
    case DW_TAG_array_type:
    case DW_TAG_interface_type:
    case DW_TAG_pointer_type:
    case DW_TAG_reference_type:
    case DW_TAG_rvalue_reference_type:
    case DW_TAG_string_type:
    case DW_TAG_subroutine_type:
    case DW_TAG_ptr_to_member_type:
    case DW_TAG_set_type:
    case DW_TAG_subrange_type:
    case DW_TAG_base_type:
    case DW_TAG_const_type:
    case DW_TAG_file_type:
    case DW_TAG_packed_type:
    case DW_TAG_volatile_type:
    case DW_TAG_typedef:
    default:
      return false;
    }
}
8248 | |
8249 | /* Make a clone of DIE. */ |
8250 | |
8251 | static dw_die_ref |
8252 | clone_die (dw_die_ref die) |
8253 | { |
8254 | dw_die_ref clone = new_die_raw (tag_value: die->die_tag); |
8255 | dw_attr_node *a; |
8256 | unsigned ix; |
8257 | |
8258 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
8259 | add_dwarf_attr (die: clone, attr: a); |
8260 | |
8261 | return clone; |
8262 | } |
8263 | |
8264 | /* Make a clone of the tree rooted at DIE. */ |
8265 | |
8266 | static dw_die_ref |
8267 | clone_tree (dw_die_ref die) |
8268 | { |
8269 | dw_die_ref c; |
8270 | dw_die_ref clone = clone_die (die); |
8271 | |
8272 | FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c))); |
8273 | |
8274 | return clone; |
8275 | } |
8276 | |
/* Make a clone of DIE as a declaration: a shallow copy carrying only the
   identity-related attributes plus DW_AT_declaration, so debuggers treat
   it as a declaration rather than a definition.  */

static dw_die_ref
clone_as_declaration (dw_die_ref die)
{
  dw_die_ref clone;
  dw_die_ref decl;
  dw_attr_node *a;
  unsigned ix;

  /* If the DIE is already a declaration, just clone it.  */
  if (is_declaration_die (die))
    return clone_die (die);

  /* If the DIE is a specification, just clone its declaration DIE.  */
  decl = get_AT_ref (die, attr_kind: DW_AT_specification);
  if (decl != NULL)
    {
      clone = clone_die (die: decl);
      if (die->comdat_type_p)
        add_AT_die_ref (die: clone, attr_kind: DW_AT_signature, targ_die: die);
      return clone;
    }

  clone = new_die_raw (tag_value: die->die_tag);

  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    {
      /* We don't want to copy over all attributes.
         For example we don't want DW_AT_byte_size because otherwise we will no
         longer have a declaration and GDB will treat it as a definition.  */

      switch (a->dw_attr)
        {
        /* Identity, naming, and type-link attributes are safe to keep on
           a declaration.  */
        case DW_AT_abstract_origin:
        case DW_AT_artificial:
        case DW_AT_containing_type:
        case DW_AT_external:
        case DW_AT_name:
        case DW_AT_type:
        case DW_AT_virtuality:
        case DW_AT_linkage_name:
        case DW_AT_MIPS_linkage_name:
          add_dwarf_attr (die: clone, attr: a);
          break;
        /* Size/alignment (and anything else) would make this look like a
           definition, so drop them.  */
        case DW_AT_byte_size:
        case DW_AT_alignment:
        default:
          break;
        }
    }

  /* Keep a signature link back to the comdat type, if any.  */
  if (die->comdat_type_p)
    add_AT_die_ref (die: clone, attr_kind: DW_AT_signature, targ_die: die);

  add_AT_flag (die: clone, attr_kind: DW_AT_declaration, flag: 1);
  return clone;
}
8335 | |
8336 | |
/* Structure to map a DIE in one CU to its copy in a comdat type unit.  */

struct decl_table_entry
{
  dw_die_ref orig;   /* Original DIE in the main compile unit.  */
  dw_die_ref copy;   /* Its declaration copy in the type unit.  */
};
8344 | |
/* Helpers to manipulate hash table of copied declarations.  */

/* Hashtable helpers: entries are hashed by the address of the original
   DIE and compared directly against a DIE pointer (compare_type).  */

struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
{
  typedef die_struct *compare_type;
  static inline hashval_t hash (const decl_table_entry *);
  static inline bool equal (const decl_table_entry *, const die_struct *);
};

/* Hash on the pointer identity of the original DIE.  */

inline hashval_t
decl_table_entry_hasher::hash (const decl_table_entry *entry)
{
  return htab_hash_pointer (entry->orig);
}

/* An entry matches a DIE when it records that DIE as its original.  */

inline bool
decl_table_entry_hasher::equal (const decl_table_entry *entry1,
				const die_struct *entry2)
{
  return entry1->orig == entry2;
}

typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8370 | |
/* Copy DIE and its ancestors, up to, but not including, the compile unit
   or type unit entry, to a new tree.  Adds the new tree to UNIT and returns
   a pointer to the copy of DIE.  If DECL_TABLE is provided, it is used
   to check if the ancestor has already been copied into UNIT.  */

static dw_die_ref
copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
		    decl_hash_type *decl_table)
{
  dw_die_ref parent = die->die_parent;
  dw_die_ref new_parent = unit;
  dw_die_ref copy;
  decl_table_entry **slot = NULL;
  struct decl_table_entry *entry = NULL;

  /* If DIE refers to a stub unfold that so we get the appropriate
     DIE registered as orig in decl_table.  */
  if (dw_die_ref c = get_AT_ref (die, attr_kind: DW_AT_signature))
    die = c;

  if (decl_table)
    {
      /* Check if the entry has already been copied to UNIT.  */
      slot = decl_table->find_slot_with_hash (comparable: die, hash: htab_hash_pointer (die),
                                              insert: INSERT);
      if (*slot != HTAB_EMPTY_ENTRY)
        {
          entry = *slot;
          return entry->copy;
        }

      /* Record in DECL_TABLE that DIE has been copied to UNIT.  The slot
         is filled in before recursing so a cyclic ancestry cannot copy
         the same DIE twice; entry->copy is completed below.  */
      entry = XCNEW (struct decl_table_entry);
      entry->orig = die;
      entry->copy = NULL;
      *slot = entry;
    }

  if (parent != NULL)
    {
      /* Prefer the specification's parent as the logical context.  */
      dw_die_ref spec = get_AT_ref (die: parent, attr_kind: DW_AT_specification);
      if (spec != NULL)
        parent = spec;
      /* Recurse until we reach (but do not copy) the unit DIE.  */
      if (!is_unit_die (c: parent))
        new_parent = copy_ancestor_tree (unit, die: parent, decl_table);
    }

  copy = clone_as_declaration (die);
  add_child_die (die: new_parent, child_die: copy);

  if (decl_table)
    {
      /* Record the pointer to the copy.  */
      entry->copy = copy;
    }

  return copy;
}
/* Copy the declaration context to the new type unit DIE.  This includes
   any surrounding namespace or type declarations.  If the DIE has an
   AT_specification attribute, it also includes attributes and children
   attached to the specification, and returns a pointer to the original
   parent of the declaration DIE.  Returns NULL otherwise.  */

static dw_die_ref
copy_declaration_context (dw_die_ref unit, dw_die_ref die)
{
  dw_die_ref decl;
  dw_die_ref new_decl;
  dw_die_ref orig_parent = NULL;

  decl = get_AT_ref (die, attr_kind: DW_AT_specification);
  if (decl == NULL)
    decl = die;
  else
    {
      unsigned ix;
      dw_die_ref c;
      dw_attr_node *a;

      /* The original DIE will be changed to a declaration, and must
         be moved to be a child of the original declaration DIE.  */
      orig_parent = decl->die_parent;

      /* Copy the type node pointer from the new DIE to the original
         declaration DIE so we can forward references later.  */
      decl->comdat_type_p = true;
      decl->die_id.die_type_node = die->die_id.die_type_node;

      /* DIE absorbs the declaration, so the specification link is no
         longer needed.  */
      remove_AT (die, attr_kind: DW_AT_specification);

      /* Merge the declaration's attributes into DIE, except those that
         would conflict with its own (name/declaration/external).  */
      FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
        {
          if (a->dw_attr != DW_AT_name
              && a->dw_attr != DW_AT_declaration
              && a->dw_attr != DW_AT_external)
            add_dwarf_attr (die, attr: a);
        }

      /* Clone the declaration's children under DIE as well.  */
      FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
    }

  /* If the (resolved) declaration sits inside a namespace or type, copy
     that surrounding context into the type unit and make DIE a
     specification of its copy there.  */
  if (decl->die_parent != NULL
      && !is_unit_die (c: decl->die_parent))
    {
      new_decl = copy_ancestor_tree (unit, die: decl, NULL);
      if (new_decl != NULL)
        {
          remove_AT (die: new_decl, attr_kind: DW_AT_signature);
          add_AT_specification (die, targ_die: new_decl);
        }
    }

  return orig_parent;
}
8486 | |
8487 | /* Generate the skeleton ancestor tree for the given NODE, then clone |
8488 | the DIE and add the clone into the tree. */ |
8489 | |
8490 | static void |
8491 | generate_skeleton_ancestor_tree (skeleton_chain_node *node) |
8492 | { |
8493 | if (node->new_die != NULL) |
8494 | return; |
8495 | |
8496 | node->new_die = clone_as_declaration (die: node->old_die); |
8497 | |
8498 | if (node->parent != NULL) |
8499 | { |
8500 | generate_skeleton_ancestor_tree (node: node->parent); |
8501 | add_child_die (die: node->parent->new_die, child_die: node->new_die); |
8502 | } |
8503 | } |
8504 | |
/* Generate a skeleton tree of DIEs containing any declarations that are
   found in the original tree.  We traverse the tree looking for declaration
   DIEs, and construct the skeleton from the bottom up whenever we find one.  */

static void
generate_skeleton_bottom_up (skeleton_chain_node *parent)
{
  skeleton_chain_node node;
  dw_die_ref c;
  dw_die_ref first;
  dw_die_ref prev = NULL;
  dw_die_ref next = NULL;

  node.parent = parent;

  /* Children form a circular sibling list.  FIRST marks where the walk
     started; NEXT is fetched before the body so C can be replaced or
     removed in place; PREV trails C for relinking.  */
  first = c = parent->old_die->die_child;
  if (c)
    next = c->die_sib;
  if (c) do {
    if (prev == NULL || prev->die_sib == c)
      prev = c;
    c = next;
    next = (c == first ? NULL : c->die_sib);
    node.old_die = c;
    node.new_die = NULL;
    if (is_declaration_die (die: c))
      {
	if (is_template_instantiation (die: c))
	  {
	    /* Instantiated templates do not need to be cloned into the
	       type unit.  Just move the DIE and its children back to
	       the skeleton tree (in the main CU).  */
	    remove_child_with_prev (child: c, prev);
	    add_child_die (die: parent->new_die, child_die: c);
	    c = prev;
	  }
	else if (c->comdat_type_p)
	  {
	    /* This is the skeleton of earlier break_out_comdat_types
	       type.  Clone the existing DIE, but keep the children
	       under the original (which is in the main CU).  */
	    dw_die_ref clone = clone_die (die: c);

	    replace_child (old_child: c, new_child: clone, prev);
	    generate_skeleton_ancestor_tree (node: parent);
	    add_child_die (die: parent->new_die, child_die: c);
	    c = clone;
	    continue;
	  }
	else
	  {
	    /* Clone the existing DIE, move the original to the skeleton
	       tree (which is in the main CU), and put the clone, with
	       all the original's children, where the original came from
	       (which is about to be moved to the type unit).  */
	    dw_die_ref clone = clone_die (die: c);
	    move_all_children (old_parent: c, new_parent: clone);

	    /* If the original has a DW_AT_object_pointer attribute,
	       it would now point to a child DIE just moved to the
	       cloned tree, so we need to remove that attribute from
	       the original.  */
	    remove_AT (die: c, attr_kind: DW_AT_object_pointer);

	    replace_child (old_child: c, new_child: clone, prev);
	    generate_skeleton_ancestor_tree (node: parent);
	    add_child_die (die: parent->new_die, child_die: c);
	    node.old_die = clone;
	    node.new_die = c;
	    c = clone;
	  }
      }
    /* Recurse into whatever now occupies this child slot.  */
    generate_skeleton_bottom_up (parent: &node);
  } while (next != NULL);
}
8580 | |
8581 | /* Wrapper function for generate_skeleton_bottom_up. */ |
8582 | |
8583 | static dw_die_ref |
8584 | generate_skeleton (dw_die_ref die) |
8585 | { |
8586 | skeleton_chain_node node; |
8587 | |
8588 | node.old_die = die; |
8589 | node.new_die = NULL; |
8590 | node.parent = NULL; |
8591 | |
8592 | /* If this type definition is nested inside another type, |
8593 | and is not an instantiation of a template, always leave |
8594 | at least a declaration in its place. */ |
8595 | if (die->die_parent != NULL |
8596 | && is_type_die (die: die->die_parent) |
8597 | && !is_template_instantiation (die)) |
8598 | node.new_die = clone_as_declaration (die); |
8599 | |
8600 | generate_skeleton_bottom_up (parent: &node); |
8601 | return node.new_die; |
8602 | } |
8603 | |
/* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
   declaration.  The original DIE is moved to a new compile unit so that
   existing references to it follow it to the new location.  If any of the
   original DIE's descendants is a declaration, we need to replace the
   original DIE with a skeleton tree and move the declarations back into the
   skeleton tree.  */

static dw_die_ref
remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
				       dw_die_ref prev)
{
  dw_die_ref skeleton, orig_parent;

  /* Copy the declaration context to the type unit DIE.  If the returned
     ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
     that DIE.  */
  orig_parent = copy_declaration_context (unit, die: child);

  skeleton = generate_skeleton (die: child);
  if (skeleton == NULL)
    /* No declarations among the descendants: simply detach CHILD.  */
    remove_child_with_prev (child, prev);
  else
    {
      /* The skeleton inherits CHILD's comdat type identity so later
         references resolve through the type node.  */
      skeleton->comdat_type_p = true;
      skeleton->die_id.die_type_node = child->die_id.die_type_node;

      /* If the original DIE was a specification, we need to put
         the skeleton under the parent DIE of the declaration.
         This leaves the original declaration in the tree, but
         it will be pruned later since there are no longer any
         references to it.  */
      if (orig_parent != NULL)
        {
          remove_child_with_prev (child, prev);
          add_child_die (die: orig_parent, child_die: skeleton);
        }
      else
        replace_child (old_child: child, new_child: skeleton, prev);
    }

  return skeleton;
}
8646 | |
8647 | static void |
8648 | copy_dwarf_procs_ref_in_attrs (dw_die_ref die, |
8649 | comdat_type_node *type_node, |
8650 | hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs); |
8651 | |
/* Helper for copy_dwarf_procs_ref_in_dies.  Make a copy of the DIE DWARF
   procedure, put it under TYPE_NODE and return the copy.  Continue looking for
   DWARF procedure references in the DW_AT_location attribute.  */

static dw_die_ref
copy_dwarf_procedure (dw_die_ref die,
		      comdat_type_node *type_node,
		      hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
{
  gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);

  /* DWARF procedures are not supposed to have children...  */
  gcc_assert (die->die_child == NULL);

  /* ... and they are supposed to have only one attribute: DW_AT_location.  */
  gcc_assert (vec_safe_length (die->die_attr) == 1
	      && ((*die->die_attr)[0].dw_attr == DW_AT_location));

  /* Do not copy more than once DWARF procedures.  The map entry is
     created (or found) first, so recursive references back to this
     procedure resolve to the same copy.  */
  bool existed;
  dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (k: die, existed: &existed);
  if (existed)
    return die_copy;

  die_copy = clone_die (die);
  add_child_die (die: type_node->root_die, child_die: die_copy);
  /* The copy's own location expression may reference further DWARF
     procedures; rewrite those too.  */
  copy_dwarf_procs_ref_in_attrs (die: die_copy, type_node, copied_dwarf_procs);
  return die_copy;
}
8681 | |
8682 | /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF |
8683 | procedures in DIE's attributes. */ |
8684 | |
8685 | static void |
8686 | copy_dwarf_procs_ref_in_attrs (dw_die_ref die, |
8687 | comdat_type_node *type_node, |
8688 | hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs) |
8689 | { |
8690 | dw_attr_node *a; |
8691 | unsigned i; |
8692 | |
8693 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a) |
8694 | { |
8695 | dw_loc_descr_ref loc; |
8696 | |
8697 | if (a->dw_attr_val.val_class != dw_val_class_loc) |
8698 | continue; |
8699 | |
8700 | for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next) |
8701 | { |
8702 | switch (loc->dw_loc_opc) |
8703 | { |
8704 | case DW_OP_call2: |
8705 | case DW_OP_call4: |
8706 | case DW_OP_call_ref: |
8707 | gcc_assert (loc->dw_loc_oprnd1.val_class |
8708 | == dw_val_class_die_ref); |
8709 | loc->dw_loc_oprnd1.v.val_die_ref.die |
8710 | = copy_dwarf_procedure (die: loc->dw_loc_oprnd1.v.val_die_ref.die, |
8711 | type_node, |
8712 | copied_dwarf_procs); |
8713 | |
8714 | default: |
8715 | break; |
8716 | } |
8717 | } |
8718 | } |
8719 | } |
8720 | |
8721 | /* Copy DWARF procedures that are referenced by the DIE tree to TREE_NODE and |
8722 | rewrite references to point to the copies. |
8723 | |
8724 | References are looked for in DIE's attributes and recursively in all its |
8725 | children attributes that are location descriptions. COPIED_DWARF_PROCS is a |
8726 | mapping from old DWARF procedures to their copy. It is used not to copy |
8727 | twice the same DWARF procedure under TYPE_NODE. */ |
8728 | |
8729 | static void |
8730 | copy_dwarf_procs_ref_in_dies (dw_die_ref die, |
8731 | comdat_type_node *type_node, |
8732 | hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs) |
8733 | { |
8734 | dw_die_ref c; |
8735 | |
8736 | copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs); |
8737 | FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c, |
8738 | type_node, |
8739 | copied_dwarf_procs)); |
8740 | } |
8741 | |
/* Traverse the DIE and set up additional .debug_types or .debug_info
   DW_UT_*type sections for each type worthy of being placed in a COMDAT
   section.  */

static void
break_out_comdat_types (dw_die_ref die)
{
  dw_die_ref c;
  dw_die_ref first;
  dw_die_ref prev = NULL;
  dw_die_ref next = NULL;
  dw_die_ref unit = NULL;

  /* Children form a circular sibling list.  FIRST marks the starting
     point; NEXT is fetched before the body so C can be replaced in
     place; PREV trails C for relinking.  */
  first = c = die->die_child;
  if (c)
    next = c->die_sib;
  if (c) do {
    if (prev == NULL || prev->die_sib == c)
      prev = c;
    c = next;
    next = (c == first ? NULL : c->die_sib);
    if (should_move_die_to_comdat (die: c))
      {
	dw_die_ref replacement;
	comdat_type_node *type_node;

	/* Break out nested types into their own type units.  */
	break_out_comdat_types (die: c);

	/* Create a new type unit DIE as the root for the new tree.  */
	unit = new_die (tag_value: DW_TAG_type_unit, NULL, NULL);
	add_AT_unsigned (die: unit, attr_kind: DW_AT_language,
			 unsigned_val: get_AT_unsigned (die: comp_unit_die (), attr_kind: DW_AT_language));

	/* Add the new unit's type DIE into the comdat type list.  */
	type_node = ggc_cleared_alloc<comdat_type_node> ();
	type_node->root_die = unit;
	type_node->next = comdat_type_list;
	comdat_type_list = type_node;

	/* Generate the type signature.  */
	generate_type_signature (die: c, type_node);

	/* Copy the declaration context, attributes, and children of the
	   declaration into the new type unit DIE, then remove this DIE
	   from the main CU (or replace it with a skeleton if necessary).  */
	replacement = remove_child_or_replace_with_skeleton (unit, child: c, prev);
	type_node->skeleton_die = replacement;

	/* Add the DIE to the new compunit.  */
	add_child_die (die: unit, child_die: c);

	/* Types can reference DWARF procedures for type size or data location
	   expressions.  Calls in DWARF expressions cannot target procedures
	   that are not in the same section.  So we must copy DWARF procedures
	   along with this type and then rewrite references to them.  */
	hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
	copy_dwarf_procs_ref_in_dies (die: c, type_node, copied_dwarf_procs);

	/* Continue the walk from the skeleton left in the main CU.  */
	if (replacement != NULL)
	  c = replacement;
      }
    else if (c->die_tag == DW_TAG_namespace
	     || c->die_tag == DW_TAG_class_type
	     || c->die_tag == DW_TAG_structure_type
	     || c->die_tag == DW_TAG_union_type)
      {
	/* Look for nested types that can be broken out.  */
	break_out_comdat_types (die: c);
      }
  } while (next != NULL);
}
8814 | |
8815 | /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations. |
8816 | Enter all the cloned children into the hash table decl_table. */ |
8817 | |
8818 | static dw_die_ref |
8819 | clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table) |
8820 | { |
8821 | dw_die_ref c; |
8822 | dw_die_ref clone; |
8823 | struct decl_table_entry *entry; |
8824 | decl_table_entry **slot; |
8825 | |
8826 | if (die->die_tag == DW_TAG_subprogram) |
8827 | clone = clone_as_declaration (die); |
8828 | else |
8829 | clone = clone_die (die); |
8830 | |
8831 | slot = decl_table->find_slot_with_hash (comparable: die, |
8832 | hash: htab_hash_pointer (die), insert: INSERT); |
8833 | |
8834 | /* Assert that DIE isn't in the hash table yet. If it would be there |
8835 | before, the ancestors would be necessarily there as well, therefore |
8836 | clone_tree_partial wouldn't be called. */ |
8837 | gcc_assert (*slot == HTAB_EMPTY_ENTRY); |
8838 | |
8839 | entry = XCNEW (struct decl_table_entry); |
8840 | entry->orig = die; |
8841 | entry->copy = clone; |
8842 | *slot = entry; |
8843 | |
8844 | if (die->die_tag != DW_TAG_subprogram) |
8845 | FOR_EACH_CHILD (die, c, |
8846 | add_child_die (clone, clone_tree_partial (c, decl_table))); |
8847 | |
8848 | return clone; |
8849 | } |
8850 | |
8851 | /* Walk the DIE and its children, looking for references to incomplete |
8852 | or trivial types that are unmarked (i.e., that are not in the current |
8853 | type_unit). */ |
8854 | |
static void
copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
{
  dw_die_ref c;
  dw_attr_node *a;
  unsigned ix;

  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    {
      if (AT_class (a) == dw_val_class_die_ref)
	{
	  dw_die_ref targ = AT_ref (a);
	  decl_table_entry **slot;
	  struct decl_table_entry *entry;

	  /* Marked DIEs are already part of this type unit, and comdat
	     types are referenced by signature; neither needs a copy.  */
	  if (targ->die_mark != 0 || targ->comdat_type_p)
	    continue;

	  slot = decl_table->find_slot_with_hash (comparable: targ,
						  hash: htab_hash_pointer (targ),
						  insert: INSERT);

	  if (*slot != HTAB_EMPTY_ENTRY)
	    {
	      /* TARG has already been copied, so we just need to
		 modify the reference to point to the copy.  */
	      entry = *slot;
	      a->dw_attr_val.v.val_die_ref.die = entry->copy;
	    }
	  else
	    {
	      dw_die_ref parent = unit;
	      dw_die_ref copy = clone_die (die: targ);

	      /* Record in DECL_TABLE that TARG has been copied.
		 Need to do this now, before the recursive call,
		 because DECL_TABLE may be expanded and SLOT
		 would no longer be a valid pointer.  */
	      entry = XCNEW (struct decl_table_entry);
	      entry->orig = targ;
	      entry->copy = copy;
	      *slot = entry;

	      /* If TARG is not a declaration DIE, we need to copy its
		 children.  */
	      if (!is_declaration_die (die: targ))
		{
		  FOR_EACH_CHILD (
		    targ, c,
		    add_child_die (copy,
				   clone_tree_partial (c, decl_table)));
		}

	      /* Make sure the cloned tree is marked as part of the
		 type unit.  */
	      mark_dies (copy);

	      /* If TARG has surrounding context, copy its ancestor tree
		 into the new type unit.  */
	      if (targ->die_parent != NULL
		  && !is_unit_die (c: targ->die_parent))
		parent = copy_ancestor_tree (unit, die: targ->die_parent,
					     decl_table);

	      add_child_die (die: parent, child_die: copy);
	      a->dw_attr_val.v.val_die_ref.die = copy;

	      /* Make sure the newly-copied DIE is walked.  If it was
		 installed in a previously-added context, it won't
		 get visited otherwise.  */
	      if (parent != unit)
		{
		  /* Find the highest point of the newly-added tree,
		     mark each node along the way, and walk from there.  */
		  parent->die_mark = 1;
		  while (parent->die_parent
			 && parent->die_parent->die_mark == 0)
		    {
		      parent = parent->die_parent;
		      parent->die_mark = 1;
		    }
		  copy_decls_walk (unit, die: parent, decl_table);
		}
	    }
	}
    }

  FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
}
8944 | |
8945 | /* Collect skeleton dies in DIE created by break_out_comdat_types already |
8946 | and record them in DECL_TABLE. */ |
8947 | |
8948 | static void |
8949 | collect_skeleton_dies (dw_die_ref die, decl_hash_type *decl_table) |
8950 | { |
8951 | dw_die_ref c; |
8952 | |
8953 | if (dw_attr_node *a = get_AT (die, attr_kind: DW_AT_signature)) |
8954 | { |
8955 | dw_die_ref targ = AT_ref (a); |
8956 | gcc_assert (targ->die_mark == 0 && targ->comdat_type_p); |
8957 | decl_table_entry **slot |
8958 | = decl_table->find_slot_with_hash (comparable: targ, |
8959 | hash: htab_hash_pointer (targ), |
8960 | insert: INSERT); |
8961 | gcc_assert (*slot == HTAB_EMPTY_ENTRY); |
8962 | /* Record in DECL_TABLE that TARG has been already copied |
8963 | by remove_child_or_replace_with_skeleton. */ |
8964 | decl_table_entry *entry = XCNEW (struct decl_table_entry); |
8965 | entry->orig = targ; |
8966 | entry->copy = die; |
8967 | *slot = entry; |
8968 | } |
8969 | FOR_EACH_CHILD (die, c, collect_skeleton_dies (c, decl_table)); |
8970 | } |
8971 | |
8972 | /* Copy declarations for "unworthy" types into the new comdat section. |
8973 | Incomplete types, modified types, and certain other types aren't broken |
8974 | out into comdat sections of their own, so they don't have a signature, |
8975 | and we need to copy the declaration into the same section so that we |
8976 | don't have an external reference. */ |
8977 | |
8978 | static void |
8979 | copy_decls_for_unworthy_types (dw_die_ref unit) |
8980 | { |
8981 | mark_dies (unit); |
8982 | decl_hash_type decl_table (10); |
8983 | collect_skeleton_dies (die: unit, decl_table: &decl_table); |
8984 | copy_decls_walk (unit, die: unit, decl_table: &decl_table); |
8985 | unmark_dies (unit); |
8986 | } |
8987 | |
8988 | /* Traverse the DIE and add a sibling attribute if it may have the |
8989 | effect of speeding up access to siblings. To save some space, |
8990 | avoid generating sibling attributes for DIE's without children. */ |
8991 | |
8992 | static void |
8993 | add_sibling_attributes (dw_die_ref die) |
8994 | { |
8995 | dw_die_ref c; |
8996 | |
8997 | if (! die->die_child) |
8998 | return; |
8999 | |
9000 | if (die->die_parent && die != die->die_parent->die_child) |
9001 | add_AT_die_ref (die, attr_kind: DW_AT_sibling, targ_die: die->die_sib); |
9002 | |
9003 | FOR_EACH_CHILD (die, c, add_sibling_attributes (c)); |
9004 | } |
9005 | |
9006 | /* Output all location lists for the DIE and its children. */ |
9007 | |
9008 | static void |
9009 | output_location_lists (dw_die_ref die) |
9010 | { |
9011 | dw_die_ref c; |
9012 | dw_attr_node *a; |
9013 | unsigned ix; |
9014 | |
9015 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
9016 | if (AT_class (a) == dw_val_class_loc_list) |
9017 | output_loc_list (AT_loc_list (a)); |
9018 | |
9019 | FOR_EACH_CHILD (die, c, output_location_lists (c)); |
9020 | } |
9021 | |
/* Counter used while laying out the .debug_loclists* offset table.
   During assign_location_list_indexes and output_loclists_offsets it
   holds the next index to hand out; afterwards it is the total number
   of assigned indexes (i.e. how large the .debug_loclists* offset
   table should be).  */
static unsigned int loc_list_idx;
9026 | |
9027 | /* Output all location list offsets for the DIE and its children. */ |
9028 | |
9029 | static void |
9030 | output_loclists_offsets (dw_die_ref die) |
9031 | { |
9032 | dw_die_ref c; |
9033 | dw_attr_node *a; |
9034 | unsigned ix; |
9035 | |
9036 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
9037 | if (AT_class (a) == dw_val_class_loc_list) |
9038 | { |
9039 | dw_loc_list_ref l = AT_loc_list (a); |
9040 | if (l->offset_emitted) |
9041 | continue; |
9042 | dw2_asm_output_delta (dwarf_offset_size, l->ll_symbol, |
9043 | loc_section_label, NULL); |
9044 | gcc_assert (l->hash == loc_list_idx); |
9045 | loc_list_idx++; |
9046 | l->offset_emitted = true; |
9047 | } |
9048 | |
9049 | FOR_EACH_CHILD (die, c, output_loclists_offsets (c)); |
9050 | } |
9051 | |
9052 | /* Recursively set indexes of location lists. */ |
9053 | |
9054 | static void |
9055 | assign_location_list_indexes (dw_die_ref die) |
9056 | { |
9057 | dw_die_ref c; |
9058 | dw_attr_node *a; |
9059 | unsigned ix; |
9060 | |
9061 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
9062 | if (AT_class (a) == dw_val_class_loc_list) |
9063 | { |
9064 | dw_loc_list_ref list = AT_loc_list (a); |
9065 | if (!list->num_assigned) |
9066 | { |
9067 | list->num_assigned = true; |
9068 | list->hash = loc_list_idx++; |
9069 | } |
9070 | } |
9071 | |
9072 | FOR_EACH_CHILD (die, c, assign_location_list_indexes (c)); |
9073 | } |
9074 | |
9075 | /* We want to limit the number of external references, because they are |
9076 | larger than local references: a relocation takes multiple words, and |
9077 | even a sig8 reference is always eight bytes, whereas a local reference |
9078 | can be as small as one byte (though DW_FORM_ref is usually 4 in GCC). |
9079 | So if we encounter multiple external references to the same type DIE, we |
9080 | make a local typedef stub for it and redirect all references there. |
9081 | |
9082 | This is the element of the hash table for keeping track of these |
9083 | references. */ |
9084 | |
struct external_ref
{
  /* The external type DIE being referenced.  */
  dw_die_ref type;
  /* Local stub redirecting references to TYPE, or NULL if none has
     been found or built yet.  */
  dw_die_ref stub;
  /* Number of references to TYPE seen so far.  */
  unsigned n_refs;
};
9091 | |
9092 | /* Hashtable helpers. */ |
9093 | |
struct external_ref_hasher : free_ptr_hash <external_ref>
{
  /* Hash on the referenced type's stable identity (symbol name or type
     signature) rather than its address, for deterministic output.  */
  static inline hashval_t hash (const external_ref *);
  /* Two entries are equal iff they reference the same type DIE.  */
  static inline bool equal (const external_ref *, const external_ref *);
};
9099 | |
9100 | inline hashval_t |
9101 | external_ref_hasher::hash (const external_ref *r) |
9102 | { |
9103 | dw_die_ref die = r->type; |
9104 | hashval_t h = 0; |
9105 | |
9106 | /* We can't use the address of the DIE for hashing, because |
9107 | that will make the order of the stub DIEs non-deterministic. */ |
9108 | if (! die->comdat_type_p) |
9109 | /* We have a symbol; use it to compute a hash. */ |
9110 | h = htab_hash_string (die->die_id.die_symbol); |
9111 | else |
9112 | { |
9113 | /* We have a type signature; use a subset of the bits as the hash. |
9114 | The 8-byte signature is at least as large as hashval_t. */ |
9115 | comdat_type_node *type_node = die->die_id.die_type_node; |
9116 | memcpy (dest: &h, src: type_node->signature, n: sizeof (h)); |
9117 | } |
9118 | return h; |
9119 | } |
9120 | |
9121 | inline bool |
9122 | external_ref_hasher::equal (const external_ref *r1, const external_ref *r2) |
9123 | { |
9124 | return r1->type == r2->type; |
9125 | } |
9126 | |
/* Hash table mapping external type DIEs to their external_ref records.  */
typedef hash_table<external_ref_hasher> external_ref_hash_type;
9128 | |
9129 | /* Return a pointer to the external_ref for references to DIE. */ |
9130 | |
9131 | static struct external_ref * |
9132 | lookup_external_ref (external_ref_hash_type *map, dw_die_ref die) |
9133 | { |
9134 | struct external_ref ref, *ref_p; |
9135 | external_ref **slot; |
9136 | |
9137 | ref.type = die; |
9138 | slot = map->find_slot (value: &ref, insert: INSERT); |
9139 | if (*slot != HTAB_EMPTY_ENTRY) |
9140 | return *slot; |
9141 | |
9142 | ref_p = XCNEW (struct external_ref); |
9143 | ref_p->type = die; |
9144 | *slot = ref_p; |
9145 | return ref_p; |
9146 | } |
9147 | |
9148 | /* Subroutine of optimize_external_refs, below. |
9149 | |
9150 | If we see a type skeleton, record it as our stub. If we see external |
9151 | references, remember how many we've seen. */ |
9152 | |
static void
optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
{
  dw_die_ref c;
  dw_attr_node *a;
  unsigned ix;
  struct external_ref *ref_p;

  if (is_type_die (die)
      && (c = get_AT_ref (die, attr_kind: DW_AT_signature)))
    {
      /* This is a local skeleton; use it for local references.  */
      ref_p = lookup_external_ref (map, die: c);
      ref_p->stub = die;
    }

  /* Scan the DIE references, and remember any that refer to DIEs from
     other CUs (i.e. those which are not marked).  lookup_external_ref
     creates the record on first sight.  */
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    if (AT_class (a) == dw_val_class_die_ref
	&& (c = AT_ref (a))->die_mark == 0
	&& is_type_die (die: c))
      {
	ref_p = lookup_external_ref (map, die: c);
	ref_p->n_refs++;
      }

  FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
}
9182 | |
9183 | /* htab_traverse callback function for optimize_external_refs, below. SLOT |
9184 | points to an external_ref, DATA is the CU we're processing. If we don't |
9185 | already have a local stub, and we have multiple refs, build a stub. */ |
9186 | |
9187 | int |
9188 | dwarf2_build_local_stub (external_ref **slot, dw_die_ref data) |
9189 | { |
9190 | struct external_ref *ref_p = *slot; |
9191 | |
9192 | if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict) |
9193 | { |
9194 | /* We have multiple references to this type, so build a small stub. |
9195 | Both of these forms are a bit dodgy from the perspective of the |
9196 | DWARF standard, since technically they should have names. */ |
9197 | dw_die_ref cu = data; |
9198 | dw_die_ref type = ref_p->type; |
9199 | dw_die_ref stub = NULL; |
9200 | |
9201 | if (type->comdat_type_p) |
9202 | { |
9203 | /* If we refer to this type via sig8, use AT_signature. */ |
9204 | stub = new_die (tag_value: type->die_tag, parent_die: cu, NULL_TREE); |
9205 | add_AT_die_ref (die: stub, attr_kind: DW_AT_signature, targ_die: type); |
9206 | } |
9207 | else |
9208 | { |
9209 | /* Otherwise, use a typedef with no name. */ |
9210 | stub = new_die (tag_value: DW_TAG_typedef, parent_die: cu, NULL_TREE); |
9211 | add_AT_die_ref (die: stub, attr_kind: DW_AT_type, targ_die: type); |
9212 | } |
9213 | |
9214 | stub->die_mark++; |
9215 | ref_p->stub = stub; |
9216 | } |
9217 | return 1; |
9218 | } |
9219 | |
9220 | /* DIE is a unit; look through all the DIE references to see if there are |
9221 | any external references to types, and if so, create local stubs for |
9222 | them which will be applied in build_abbrev_table. This is useful because |
9223 | references to local DIEs are smaller. */ |
9224 | |
9225 | static external_ref_hash_type * |
9226 | optimize_external_refs (dw_die_ref die) |
9227 | { |
9228 | external_ref_hash_type *map = new external_ref_hash_type (10); |
9229 | optimize_external_refs_1 (die, map); |
9230 | map->traverse <dw_die_ref, dwarf2_build_local_stub> (argument: die); |
9231 | return map; |
9232 | } |
9233 | |
/* The following 4 variables are temporaries that are computed only during the
   build_abbrev_table call and used and released during the following
   optimize_abbrev_table call.  */

/* First abbrev_id that can be optimized based on usage.  */
static unsigned int abbrev_opt_start;

/* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
   abbrev_id smaller than this, because they must be already sized
   during build_abbrev_table).  */
static unsigned int abbrev_opt_base_type_end;

/* Vector of usage counts during build_abbrev_table.  Indexed by
   abbrev_id - abbrev_opt_start.  */
static vec<unsigned int> abbrev_usage_count;

/* Vector of all DIEs added with die_abbrev >= abbrev_opt_start.  */
static vec<dw_die_ref> sorted_abbrev_dies;
9252 | |
9253 | /* The format of each DIE (and its attribute value pairs) is encoded in an |
9254 | abbreviation table. This routine builds the abbreviation table and assigns |
9255 | a unique abbreviation id for each abbreviation entry. The children of each |
9256 | die are visited recursively. */ |
9257 | |
static void
build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
{
  unsigned int abbrev_id = 0;
  dw_die_ref c;
  dw_attr_node *a;
  unsigned ix;
  dw_die_ref abbrev;

  /* Scan the DIE references, and replace any that refer to
     DIEs from other CUs (i.e. those which are not marked) with
     the local stubs we built in optimize_external_refs.  */
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    if (AT_class (a) == dw_val_class_die_ref
	&& (c = AT_ref (a))->die_mark == 0)
      {
	struct external_ref *ref_p;
	gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);

	if (is_type_die (die: c)
	    && (ref_p = lookup_external_ref (map: extern_map, die: c))
	    && ref_p->stub && ref_p->stub != die)
	  {
	    /* A DW_AT_signature is itself how a stub refers to the
	       external type; it must keep pointing outward.  */
	    gcc_assert (a->dw_attr != DW_AT_signature);
	    change_AT_die_ref (ref: a, new_die: ref_p->stub);
	  }
	else
	  /* We aren't changing this reference, so mark it external.  */
	  set_AT_ref_external (a, i: 1);
      }

  /* Search the existing abbreviations for one with the same tag, the
     same has-children flag, and attribute-by-attribute identical
     attribute codes and forms.  */
  FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
    {
      dw_attr_node *die_a, *abbrev_a;
      unsigned ix;
      bool ok = true;

      /* Abbreviation code 0 is reserved (it terminates sibling chains);
	 never match slot 0.  */
      if (abbrev_id == 0)
	continue;
      if (abbrev->die_tag != die->die_tag)
	continue;
      if ((abbrev->die_child != NULL) != (die->die_child != NULL))
	continue;

      if (vec_safe_length (v: abbrev->die_attr) != vec_safe_length (v: die->die_attr))
	continue;

      FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
	{
	  abbrev_a = &(*abbrev->die_attr)[ix];
	  if ((abbrev_a->dw_attr != die_a->dw_attr)
	      || (value_format (abbrev_a) != value_format (die_a)))
	    {
	      ok = false;
	      break;
	    }
	}
      if (ok)
	break;
    }

  /* If the loop above ran off the end without a `break', ABBREV_ID now
     equals the table length: no entry matched, so DIE itself becomes
     the new abbreviation entry.  */
  if (abbrev_id >= vec_safe_length (v: abbrev_die_table))
    {
      vec_safe_push (v&: abbrev_die_table, obj: die);
      if (abbrev_opt_start)
	abbrev_usage_count.safe_push (obj: 0);
    }
  /* Track per-abbreviation usage so optimize_abbrev_table can renumber
     abbreviations by popularity.  */
  if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
    {
      abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
      sorted_abbrev_dies.safe_push (obj: die);
    }

  die->die_abbrev = abbrev_id;
  FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
}
9334 | |
9335 | /* Callback function for sorted_abbrev_dies vector sorting. We sort |
9336 | by die_abbrev's usage count, from the most commonly used |
9337 | abbreviation to the least. */ |
9338 | |
9339 | static int |
9340 | die_abbrev_cmp (const void *p1, const void *p2) |
9341 | { |
9342 | dw_die_ref die1 = *(const dw_die_ref *) p1; |
9343 | dw_die_ref die2 = *(const dw_die_ref *) p2; |
9344 | |
9345 | gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start); |
9346 | gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start); |
9347 | |
9348 | if (die1->die_abbrev >= abbrev_opt_base_type_end |
9349 | && die2->die_abbrev >= abbrev_opt_base_type_end) |
9350 | { |
9351 | if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start] |
9352 | > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start]) |
9353 | return -1; |
9354 | if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start] |
9355 | < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start]) |
9356 | return 1; |
9357 | } |
9358 | |
9359 | /* Stabilize the sort. */ |
9360 | if (die1->die_abbrev < die2->die_abbrev) |
9361 | return -1; |
9362 | if (die1->die_abbrev > die2->die_abbrev) |
9363 | return 1; |
9364 | |
9365 | return 0; |
9366 | } |
9367 | |
/* Convert dw_val_class_const and dw_val_class_unsigned_const class attributes
   of the DIEs in sorted_abbrev_dies[first_id] through
   sorted_abbrev_dies[end - 1] into dw_val_class_const_implicit or
   dw_val_class_unsigned_const_implicit.  */
9372 | |
static void
optimize_implicit_const (unsigned int first_id, unsigned int end,
			 vec<bool> &implicit_consts)
{
  /* It never makes sense if there is just one DIE using the abbreviation.  */
  if (end < first_id + 2)
    return;

  dw_attr_node *a;
  unsigned ix, i;
  /* All DIEs in [first_id, end) share one abbreviation, so the first
     one is representative of the attribute layout.  */
  dw_die_ref die = sorted_abbrev_dies[first_id];
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    if (implicit_consts[ix])
      {
	enum dw_val_class new_class = dw_val_class_none;
	switch (AT_class (a))
	  {
	  case dw_val_class_unsigned_const:
	    /* The implicit constant is stored as a *signed* LEB128 in
	       .debug_abbrev, so an unsigned value whose top bit is set
	       can't be represented that way.  */
	    if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
	      continue;

	    /* The .debug_abbrev section will grow by
	       size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
	       in all the DIEs using that abbreviation.  Only convert when
	       that is a net win.  */
	    if (constant_size (AT_unsigned (a)) * (end - first_id)
		<= (unsigned) size_of_sleb128 (AT_unsigned (a)))
	      continue;

	    new_class = dw_val_class_unsigned_const_implicit;
	    break;

	  case dw_val_class_const:
	    new_class = dw_val_class_const_implicit;
	    break;

	  case dw_val_class_file:
	    new_class = dw_val_class_file_implicit;
	    break;

	  default:
	    continue;
	  }
	/* Retag the attribute in every DIE sharing the abbreviation.  */
	for (i = first_id; i < end; i++)
	  (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
	    = new_class;
      }
}
9420 | |
9421 | /* Attempt to optimize abbreviation table from abbrev_opt_start |
9422 | abbreviation above. */ |
9423 | |
static void
optimize_abbrev_table (void)
{
  /* Renumbering only pays off when it can shrink the encoded abbrev
     codes: always worth trying for DWARF 5 (implicit-const conversion),
     otherwise only once some ids exceed 127 (one uleb128 byte).  */
  if (abbrev_opt_start
      && vec_safe_length (v: abbrev_die_table) > abbrev_opt_start
      && (dwarf_version >= 5 || vec_safe_length (v: abbrev_die_table) > 127))
    {
      auto_vec<bool, 32> implicit_consts;
      sorted_abbrev_dies.qsort (die_abbrev_cmp);

      unsigned int abbrev_id = abbrev_opt_start - 1;
      unsigned int first_id = ~0U;
      unsigned int last_abbrev_id = 0;
      unsigned int i;
      dw_die_ref die;
      if (abbrev_opt_base_type_end > abbrev_opt_start)
	abbrev_id = abbrev_opt_base_type_end - 1;
      /* Reassign abbreviation ids from abbrev_opt_start above, so that
	 most commonly used abbreviations come first.  The sort placed
	 all DIEs sharing an abbreviation next to each other, so each
	 change of die_abbrev starts a new group.  */
      FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
	{
	  dw_attr_node *a;
	  unsigned ix;

	  /* If calc_base_type_die_sizes has been called, the CU and
	     base types after it can't be optimized, because we've already
	     calculated their DIE offsets.  We've sorted them first.  */
	  if (die->die_abbrev < abbrev_opt_base_type_end)
	    continue;
	  if (die->die_abbrev != last_abbrev_id)
	    {
	      /* First DIE of a new group: close out the previous group's
		 implicit-const analysis, then allocate the next id.  */
	      last_abbrev_id = die->die_abbrev;
	      if (dwarf_version >= 5 && first_id != ~0U)
		optimize_implicit_const (first_id, end: i, implicit_consts);
	      abbrev_id++;
	      (*abbrev_die_table)[abbrev_id] = die;
	      if (dwarf_version >= 5)
		{
		  first_id = i;
		  implicit_consts.truncate (size: 0);

		  /* Start by assuming every constant-like attribute is
		     an implicit-const candidate; later DIEs in the group
		     veto any whose value differs.  */
		  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
		    switch (AT_class (a))
		      {
		      case dw_val_class_const:
		      case dw_val_class_unsigned_const:
		      case dw_val_class_file:
			implicit_consts.safe_push (obj: true);
			break;
		      default:
			implicit_consts.safe_push (obj: false);
			break;
		      }
		}
	    }
	  else if (dwarf_version >= 5)
	    {
	      /* Subsequent DIE in the same group: an attribute remains a
		 candidate only if its value matches the group leader's.  */
	      FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
		if (!implicit_consts[ix])
		  continue;
		else
		  {
		    dw_attr_node *other_a
		      = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
		    if (!dw_val_equal_p (a: &a->dw_attr_val,
					 b: &other_a->dw_attr_val))
		      implicit_consts[ix] = false;
		  }
	    }
	  die->die_abbrev = abbrev_id;
	}
      gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
      /* Close out the final group.  */
      if (dwarf_version >= 5 && first_id != ~0U)
	optimize_implicit_const (first_id, end: i, implicit_consts);
    }

  /* Release the temporaries set up by build_abbrev_table.  */
  abbrev_opt_start = 0;
  abbrev_opt_base_type_end = 0;
  abbrev_usage_count.release ();
  sorted_abbrev_dies.release ();
}
9505 | |
9506 | /* Return the power-of-two number of bytes necessary to represent VALUE. */ |
9507 | |
9508 | static int |
9509 | constant_size (unsigned HOST_WIDE_INT value) |
9510 | { |
9511 | int log; |
9512 | |
9513 | if (value == 0) |
9514 | log = 0; |
9515 | else |
9516 | log = floor_log2 (x: value); |
9517 | |
9518 | log = log / 8; |
9519 | log = 1 << (floor_log2 (x: log) + 1); |
9520 | |
9521 | return log; |
9522 | } |
9523 | |
9524 | /* Return the size of a DIE as it is represented in the |
9525 | .debug_info section. */ |
9526 | |
static unsigned long
size_of_die (dw_die_ref die)
{
  unsigned long size = 0;
  dw_attr_node *a;
  unsigned ix;
  enum dwarf_form form;

  /* Every DIE starts with its uleb128 abbreviation code.  */
  size += size_of_uleb128 (die->die_abbrev);
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    {
      switch (AT_class (a))
	{
	case dw_val_class_addr:
	  /* Split DWARF emits a uleb128 index into .debug_addr rather
	     than a full address.  */
	  if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
	    {
	      gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
	      size += size_of_uleb128 (AT_index (a));
	    }
	  else
	    size += DWARF2_ADDR_SIZE;
	  break;
	case dw_val_class_offset:
	  size += dwarf_offset_size;
	  break;
	case dw_val_class_loc:
	  {
	    unsigned long lsize = size_of_locs (loc: AT_loc (a));

	    /* Block length.  DWARF 4 exprlocs use a uleb128 length;
	       earlier versions use a fixed-size block form.  */
	    if (dwarf_version >= 4)
	      size += size_of_uleb128 (lsize);
	    else
	      size += constant_size (value: lsize);
	    size += lsize;
	  }
	  break;
	case dw_val_class_loc_list:
	  if (dwarf_split_debug_info && dwarf_version >= 5)
	    {
	      /* uleb128 index into the loclists offset table.  */
	      gcc_assert (AT_loc_list (a)->num_assigned);
	      size += size_of_uleb128 (AT_loc_list (a)->hash);
	    }
	  else
	    size += dwarf_offset_size;
	  break;
	case dw_val_class_view_list:
	  size += dwarf_offset_size;
	  break;
	case dw_val_class_range_list:
	  if (value_format (a) == DW_FORM_rnglistx)
	    {
	      /* uleb128 index into the rnglists offset table.  */
	      gcc_assert (rnglist_idx);
	      dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
	      size += size_of_uleb128 (r->idx);
	    }
	  else
	    size += dwarf_offset_size;
	  break;
	case dw_val_class_const:
	  size += size_of_sleb128 (AT_int (a));
	  break;
	case dw_val_class_unsigned_const:
	  {
	    int csize = constant_size (value: AT_unsigned (a));
	    /* NOTE(review): in DWARF 3, a data4/data8 form for
	       DW_AT_data_member_location would presumably be read as a
	       location-list offset, hence the uleb128 here — must match
	       the form chosen by value_format.  */
	    if (dwarf_version == 3
		&& a->dw_attr == DW_AT_data_member_location
		&& csize >= 4)
	      size += size_of_uleb128 (AT_unsigned (a));
	    else
	      size += csize;
	  }
	  break;
	case dw_val_class_symview:
	  /* Smallest fixed form able to hold any view number.  */
	  if (symview_upper_bound <= 0xff)
	    size += 1;
	  else if (symview_upper_bound <= 0xffff)
	    size += 2;
	  else if (symview_upper_bound <= 0xffffffff)
	    size += 4;
	  else
	    size += 8;
	  break;
	case dw_val_class_const_implicit:
	case dw_val_class_unsigned_const_implicit:
	case dw_val_class_file_implicit:
	  /* These occupy no size in the DIE, just an extra sleb128 in
	     .debug_abbrev.  */
	  break;
	case dw_val_class_const_double:
	  size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
	  if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
	    size++; /* block */
	  break;
	case dw_val_class_wide_int:
	  size += (get_full_len (op: *a->dw_attr_val.v.val_wide)
		   * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
	  if (get_full_len (op: *a->dw_attr_val.v.val_wide)
	      * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
	    size++; /* block */
	  break;
	case dw_val_class_vec:
	  size += constant_size (value: a->dw_attr_val.v.val_vec.length
				 * a->dw_attr_val.v.val_vec.elt_size)
		  + a->dw_attr_val.v.val_vec.length
		  * a->dw_attr_val.v.val_vec.elt_size; /* block */
	  break;
	case dw_val_class_flag:
	  if (dwarf_version >= 4)
	    /* Currently all add_AT_flag calls pass in 1 as last argument,
	       so DW_FORM_flag_present can be used.  If that ever changes,
	       we'll need to use DW_FORM_flag and have some optimization
	       in build_abbrev_table that will change those to
	       DW_FORM_flag_present if it is set to 1 in all DIEs using
	       the same abbrev entry.  */
	    gcc_assert (a->dw_attr_val.v.val_flag == 1);
	  else
	    size += 1;
	  break;
	case dw_val_class_die_ref:
	  if (AT_ref_external (a))
	    {
	      /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
		 we use DW_FORM_ref_addr.  In DWARF2, DW_FORM_ref_addr
		 is sized by target address length, whereas in DWARF3
		 it's always sized as an offset.  */
	      if (AT_ref (a)->comdat_type_p)
		size += DWARF_TYPE_SIGNATURE_SIZE;
	      else if (dwarf_version == 2)
		size += DWARF2_ADDR_SIZE;
	      else
		size += dwarf_offset_size;
	    }
	  else
	    size += dwarf_offset_size;
	  break;
	case dw_val_class_fde_ref:
	  size += dwarf_offset_size;
	  break;
	case dw_val_class_lbl_id:
	  /* Split DWARF emits a uleb128 .debug_addr index for labels.  */
	  if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
	    {
	      gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
	      size += size_of_uleb128 (AT_index (a));
	    }
	  else
	    size += DWARF2_ADDR_SIZE;
	  break;
	case dw_val_class_lineptr:
	case dw_val_class_macptr:
	case dw_val_class_loclistsptr:
	  size += dwarf_offset_size;
	  break;
	case dw_val_class_str:
	  /* Strings are an offset into .debug_str / .debug_line_str, a
	     uleb128 string-table index, or inline NUL-terminated.  */
	  form = AT_string_form (a);
	  if (form == DW_FORM_strp || form == DW_FORM_line_strp)
	    size += dwarf_offset_size;
	  else if (form == dwarf_FORM (form: DW_FORM_strx))
	    size += size_of_uleb128 (AT_index (a));
	  else
	    size += strlen (s: a->dw_attr_val.v.val_str->str) + 1;
	  break;
	case dw_val_class_file:
	  size += constant_size (value: maybe_emit_file (fd: a->dw_attr_val.v.val_file));
	  break;
	case dw_val_class_data8:
	  size += 8;
	  break;
	case dw_val_class_vms_delta:
	  size += dwarf_offset_size;
	  break;
	case dw_val_class_high_pc:
	  size += DWARF2_ADDR_SIZE;
	  break;
	case dw_val_class_discr_value:
	  size += size_of_discr_value (discr_value: &a->dw_attr_val.v.val_discr_value);
	  break;
	case dw_val_class_discr_list:
	  {
	    unsigned block_size = size_of_discr_list (discr_list: AT_discr_list (a));

	    /* This is a block, so we have the block length and then its
	       data.  */
	    size += constant_size (value: block_size) + block_size;
	  }
	  break;
	default:
	  gcc_unreachable ();
	}
    }

  return size;
}
9720 | |
9721 | /* Size the debugging information associated with a given DIE. Visits the |
9722 | DIE's children recursively. Updates the global variable next_die_offset, on |
9723 | each time through. Uses the current value of next_die_offset to update the |
9724 | die_offset field in each DIE. */ |
9725 | |
9726 | static void |
9727 | calc_die_sizes (dw_die_ref die) |
9728 | { |
9729 | dw_die_ref c; |
9730 | |
9731 | gcc_assert (die->die_offset == 0 |
9732 | || (unsigned long int) die->die_offset == next_die_offset); |
9733 | die->die_offset = next_die_offset; |
9734 | next_die_offset += size_of_die (die); |
9735 | |
9736 | FOR_EACH_CHILD (die, c, calc_die_sizes (c)); |
9737 | |
9738 | if (die->die_child != NULL) |
9739 | /* Count the null byte used to terminate sibling lists. */ |
9740 | next_die_offset += 1; |
9741 | } |
9742 | |
/* Size just the base type children at the start of the CU.
   This is needed because build_abbrev needs to size locs
   and sizing of type based stack ops needs to know die_offset
   values for the base types.  */

static void
calc_base_type_die_sizes (void)
{
  /* Base type DIEs are emitted right after the CU DIE itself, so
     offsets start just past the compile-unit header (the skeleton
     header when splitting debug info) plus the CU DIE's own size.  */
  unsigned long die_offset = (dwarf_split_debug_info
			      ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
			      : DWARF_COMPILE_UNIT_HEADER_SIZE);
  unsigned int i;
  dw_die_ref base_type;
#if ENABLE_ASSERT_CHECKING
  dw_die_ref prev = comp_unit_die ()->die_child;
#endif

  die_offset += size_of_die (die: comp_unit_die ());
  for (i = 0; base_types.iterate (ix: i, ptr: &base_type); i++)
    {
#if ENABLE_ASSERT_CHECKING
      /* Verify the base types really are the first children of the CU
	 DIE, in order, childless, with abbrevs already assigned.  */
      gcc_assert (base_type->die_offset == 0
		  && prev->die_sib == base_type
		  && base_type->die_child == NULL
		  && base_type->die_abbrev);
      prev = base_type;
#endif
      /* Track one past the highest abbrev code used by a base type, so
	 the abbrev optimization knows where the base-type range ends.  */
      if (abbrev_opt_start
	  && base_type->die_abbrev >= abbrev_opt_base_type_end)
	abbrev_opt_base_type_end = base_type->die_abbrev + 1;
      base_type->die_offset = die_offset;
      die_offset += size_of_die (die: base_type);
    }
}
9777 | |
9778 | /* Set the marks for a die and its children. We do this so |
9779 | that we know whether or not a reference needs to use FORM_ref_addr; only |
9780 | DIEs in the same CU will be marked. We used to clear out the offset |
9781 | and use that as the flag, but ran into ordering problems. */ |
9782 | |
9783 | static void |
9784 | mark_dies (dw_die_ref die) |
9785 | { |
9786 | dw_die_ref c; |
9787 | |
9788 | gcc_assert (!die->die_mark); |
9789 | |
9790 | die->die_mark = 1; |
9791 | FOR_EACH_CHILD (die, c, mark_dies (c)); |
9792 | } |
9793 | |
9794 | /* Clear the marks for a die and its children. */ |
9795 | |
9796 | static void |
9797 | unmark_dies (dw_die_ref die) |
9798 | { |
9799 | dw_die_ref c; |
9800 | |
9801 | if (! use_debug_types) |
9802 | gcc_assert (die->die_mark); |
9803 | |
9804 | die->die_mark = 0; |
9805 | FOR_EACH_CHILD (die, c, unmark_dies (c)); |
9806 | } |
9807 | |
9808 | /* Clear the marks for a die, its children and referred dies. */ |
9809 | |
9810 | static void |
9811 | unmark_all_dies (dw_die_ref die) |
9812 | { |
9813 | dw_die_ref c; |
9814 | dw_attr_node *a; |
9815 | unsigned ix; |
9816 | |
9817 | if (!die->die_mark) |
9818 | return; |
9819 | die->die_mark = 0; |
9820 | |
9821 | FOR_EACH_CHILD (die, c, unmark_all_dies (c)); |
9822 | |
9823 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
9824 | if (AT_class (a) == dw_val_class_die_ref) |
9825 | unmark_all_dies (die: AT_ref (a)); |
9826 | } |
9827 | |
9828 | /* Calculate if the entry should appear in the final output file. It may be |
9829 | from a pruned a type. */ |
9830 | |
9831 | static bool |
9832 | include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p) |
9833 | { |
9834 | /* By limiting gnu pubnames to definitions only, gold can generate a |
9835 | gdb index without entries for declarations, which don't include |
9836 | enough information to be useful. */ |
9837 | if (debug_generate_pub_sections == 2 && is_declaration_die (die: p->die)) |
9838 | return false; |
9839 | |
9840 | if (table == pubname_table) |
9841 | { |
9842 | /* Enumerator names are part of the pubname table, but the |
9843 | parent DW_TAG_enumeration_type die may have been pruned. |
9844 | Don't output them if that is the case. */ |
9845 | if (p->die->die_tag == DW_TAG_enumerator && |
9846 | (p->die->die_parent == NULL |
9847 | || !p->die->die_parent->die_perennial_p)) |
9848 | return false; |
9849 | |
9850 | /* Everything else in the pubname table is included. */ |
9851 | return true; |
9852 | } |
9853 | |
9854 | /* The pubtypes table shouldn't include types that have been |
9855 | pruned. */ |
9856 | return (p->die->die_offset != 0 |
9857 | || !flag_eliminate_unused_debug_types); |
9858 | } |
9859 | |
9860 | /* Return the size of the .debug_pubnames or .debug_pubtypes table |
9861 | generated for the compilation unit. */ |
9862 | |
9863 | static unsigned long |
9864 | size_of_pubnames (vec<pubname_entry, va_gc> *names) |
9865 | { |
9866 | unsigned long size; |
9867 | unsigned i; |
9868 | pubname_entry *p; |
9869 | int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0; |
9870 | |
9871 | size = DWARF_PUBNAMES_HEADER_SIZE; |
9872 | FOR_EACH_VEC_ELT (*names, i, p) |
9873 | if (include_pubname_in_output (table: names, p)) |
9874 | size += strlen (s: p->name) + dwarf_offset_size + 1 + space_for_flags; |
9875 | |
9876 | size += dwarf_offset_size; |
9877 | return size; |
9878 | } |
9879 | |
9880 | /* Return the size of the information in the .debug_aranges section. */ |
9881 | |
9882 | static unsigned long |
9883 | size_of_aranges (void) |
9884 | { |
9885 | unsigned long size; |
9886 | |
9887 | size = DWARF_ARANGES_HEADER_SIZE; |
9888 | |
9889 | /* Count the address/length pair for this compilation unit. */ |
9890 | if (switch_text_ranges) |
9891 | size += 2 * DWARF2_ADDR_SIZE |
9892 | * (vec_safe_length (v: switch_text_ranges) / 2 + 1); |
9893 | if (switch_cold_ranges) |
9894 | size += 2 * DWARF2_ADDR_SIZE |
9895 | * (vec_safe_length (v: switch_cold_ranges) / 2 + 1); |
9896 | if (have_multiple_function_sections) |
9897 | { |
9898 | unsigned fde_idx; |
9899 | dw_fde_ref fde; |
9900 | |
9901 | FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde) |
9902 | { |
9903 | if (fde->ignored_debug) |
9904 | continue; |
9905 | if (!fde->in_std_section) |
9906 | size += 2 * DWARF2_ADDR_SIZE; |
9907 | if (fde->dw_fde_second_begin && !fde->second_in_std_section) |
9908 | size += 2 * DWARF2_ADDR_SIZE; |
9909 | } |
9910 | } |
9911 | |
9912 | /* Count the two zero words used to terminated the address range table. */ |
9913 | size += 2 * DWARF2_ADDR_SIZE; |
9914 | return size; |
9915 | } |
9916 | |
/* Select the encoding of an attribute value.  Returns the DW_FORM_*
   code that will be used to represent attribute A in the output,
   choosing the smallest form that can hold the value and honoring
   per-DWARF-version and split-debug-info constraints.  */

static enum dwarf_form
value_format (dw_attr_node *a)
{
  switch (AT_class (a))
    {
    case dw_val_class_addr:
      /* Only very few attributes allow DW_FORM_addr.  */
      switch (a->dw_attr)
	{
	case DW_AT_low_pc:
	case DW_AT_high_pc:
	case DW_AT_entry_pc:
	case DW_AT_trampoline:
	  return (AT_index (a) == NOT_INDEXED
		  ? DW_FORM_addr : dwarf_FORM (form: DW_FORM_addrx));
	default:
	  break;
	}
      /* Other address-class values are emitted as plain data of the
	 target address size.  */
      switch (DWARF2_ADDR_SIZE)
	{
	case 1:
	  return DW_FORM_data1;
	case 2:
	  return DW_FORM_data2;
	case 4:
	  return DW_FORM_data4;
	case 8:
	  return DW_FORM_data8;
	default:
	  gcc_unreachable ();
	}
    case dw_val_class_loc_list:
      if (dwarf_split_debug_info
	  && dwarf_version >= 5
	  && AT_loc_list (a)->num_assigned)
	return DW_FORM_loclistx;
      /* FALLTHRU */
    case dw_val_class_view_list:
    case dw_val_class_range_list:
      /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo
	 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
	 care about sizes of .debug* sections in shared libraries and
	 executables and don't take into account relocations that affect just
	 relocatable objects - for DW_FORM_rnglistx we'd have to emit offset
	 table in the .debug_rnglists section.  */
      if (dwarf_split_debug_info
	  && dwarf_version >= 5
	  && AT_class (a) == dw_val_class_range_list
	  && rnglist_idx
	  && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
	return DW_FORM_rnglistx;
      if (dwarf_version >= 4)
	return DW_FORM_sec_offset;
      /* FALLTHRU */
    case dw_val_class_vms_delta:
    case dw_val_class_offset:
      switch (dwarf_offset_size)
	{
	case 4:
	  return DW_FORM_data4;
	case 8:
	  return DW_FORM_data8;
	default:
	  gcc_unreachable ();
	}
    case dw_val_class_loc:
      if (dwarf_version >= 4)
	return DW_FORM_exprloc;
      /* Pre-DWARF4 location expressions are emitted as blocks; pick
	 the smallest block form that can encode the expression size.  */
      switch (constant_size (value: size_of_locs (loc: AT_loc (a))))
	{
	case 1:
	  return DW_FORM_block1;
	case 2:
	  return DW_FORM_block2;
	case 4:
	  return DW_FORM_block4;
	default:
	  gcc_unreachable ();
	}
    case dw_val_class_const:
      return DW_FORM_sdata;
    case dw_val_class_unsigned_const:
      switch (constant_size (value: AT_unsigned (a)))
	{
	case 1:
	  return DW_FORM_data1;
	case 2:
	  return DW_FORM_data2;
	case 4:
	  /* In DWARF3 DW_AT_data_member_location with
	     DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
	     constant, so we need to use DW_FORM_udata if we need
	     a large constant.  */
	  if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
	    return DW_FORM_udata;
	  return DW_FORM_data4;
	case 8:
	  if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
	    return DW_FORM_udata;
	  return DW_FORM_data8;
	default:
	  gcc_unreachable ();
	}
    case dw_val_class_const_implicit:
    case dw_val_class_unsigned_const_implicit:
    case dw_val_class_file_implicit:
      return DW_FORM_implicit_const;
    case dw_val_class_const_double:
      /* A const_double spans two HOST_WIDE_INTs, hence twice the
	 host-wide-int width in bits.  */
      switch (HOST_BITS_PER_WIDE_INT)
	{
	case 8:
	  return DW_FORM_data2;
	case 16:
	  return DW_FORM_data4;
	case 32:
	  return DW_FORM_data8;
	case 64:
	  if (dwarf_version >= 5)
	    return DW_FORM_data16;
	  /* FALLTHRU */
	default:
	  return DW_FORM_block1;
	}
    case dw_val_class_wide_int:
      switch (get_full_len (op: *a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
	{
	case 8:
	  return DW_FORM_data1;
	case 16:
	  return DW_FORM_data2;
	case 32:
	  return DW_FORM_data4;
	case 64:
	  return DW_FORM_data8;
	case 128:
	  if (dwarf_version >= 5)
	    return DW_FORM_data16;
	  /* FALLTHRU */
	default:
	  return DW_FORM_block1;
	}
    case dw_val_class_symview:
      /* ??? We might use uleb128, but then we'd have to compute
	 .debug_info offsets in the assembler.  */
      if (symview_upper_bound <= 0xff)
	return DW_FORM_data1;
      else if (symview_upper_bound <= 0xffff)
	return DW_FORM_data2;
      else if (symview_upper_bound <= 0xffffffff)
	return DW_FORM_data4;
      else
	return DW_FORM_data8;
    case dw_val_class_vec:
      /* Vectors are emitted as blocks sized by total byte length.  */
      switch (constant_size (value: a->dw_attr_val.v.val_vec.length
			     * a->dw_attr_val.v.val_vec.elt_size))
	{
	case 1:
	  return DW_FORM_block1;
	case 2:
	  return DW_FORM_block2;
	case 4:
	  return DW_FORM_block4;
	default:
	  gcc_unreachable ();
	}
    case dw_val_class_flag:
      if (dwarf_version >= 4)
	{
	  /* Currently all add_AT_flag calls pass in 1 as last argument,
	     so DW_FORM_flag_present can be used.  If that ever changes,
	     we'll need to use DW_FORM_flag and have some optimization
	     in build_abbrev_table that will change those to
	     DW_FORM_flag_present if it is set to 1 in all DIEs using
	     the same abbrev entry.  */
	  gcc_assert (a->dw_attr_val.v.val_flag == 1);
	  return DW_FORM_flag_present;
	}
      return DW_FORM_flag;
    case dw_val_class_die_ref:
      if (AT_ref_external (a))
	{
	  if (AT_ref (a)->comdat_type_p)
	    return DW_FORM_ref_sig8;
	  else
	    return DW_FORM_ref_addr;
	}
      else
	return DW_FORM_ref;
    case dw_val_class_fde_ref:
      return DW_FORM_data;
    case dw_val_class_lbl_id:
      return (AT_index (a) == NOT_INDEXED
	      ? DW_FORM_addr : dwarf_FORM (form: DW_FORM_addrx));
    case dw_val_class_lineptr:
    case dw_val_class_macptr:
    case dw_val_class_loclistsptr:
      return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
    case dw_val_class_str:
      return AT_string_form (a);
    case dw_val_class_file:
      switch (constant_size (value: maybe_emit_file (fd: a->dw_attr_val.v.val_file)))
	{
	case 1:
	  return DW_FORM_data1;
	case 2:
	  return DW_FORM_data2;
	case 4:
	  return DW_FORM_data4;
	default:
	  gcc_unreachable ();
	}

    case dw_val_class_data8:
      return DW_FORM_data8;

    case dw_val_class_high_pc:
      switch (DWARF2_ADDR_SIZE)
	{
	case 1:
	  return DW_FORM_data1;
	case 2:
	  return DW_FORM_data2;
	case 4:
	  return DW_FORM_data4;
	case 8:
	  return DW_FORM_data8;
	default:
	  gcc_unreachable ();
	}

    case dw_val_class_discr_value:
      return (a->dw_attr_val.v.val_discr_value.pos
	      ? DW_FORM_udata
	      : DW_FORM_sdata);
    case dw_val_class_discr_list:
      switch (constant_size (value: size_of_discr_list (discr_list: AT_discr_list (a))))
	{
	case 1:
	  return DW_FORM_block1;
	case 2:
	  return DW_FORM_block2;
	case 4:
	  return DW_FORM_block4;
	default:
	  gcc_unreachable ();
	}

    default:
      gcc_unreachable ();
    }
}
10170 | |
10171 | /* Output the encoding of an attribute value. */ |
10172 | |
10173 | static void |
10174 | output_value_format (dw_attr_node *a) |
10175 | { |
10176 | enum dwarf_form form = value_format (a); |
10177 | |
10178 | dw2_asm_output_data_uleb128 (form, "(%s)" , dwarf_form_name (form)); |
10179 | } |
10180 | |
10181 | /* Given a die and id, produce the appropriate abbreviations. */ |
10182 | |
10183 | static void |
10184 | output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev) |
10185 | { |
10186 | unsigned ix; |
10187 | dw_attr_node *a_attr; |
10188 | |
10189 | dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)" ); |
10190 | dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)" , |
10191 | dwarf_tag_name (tag: abbrev->die_tag)); |
10192 | |
10193 | if (abbrev->die_child != NULL) |
10194 | dw2_asm_output_data (1, DW_children_yes, "DW_children_yes" ); |
10195 | else |
10196 | dw2_asm_output_data (1, DW_children_no, "DW_children_no" ); |
10197 | |
10198 | for (ix = 0; vec_safe_iterate (v: abbrev->die_attr, ix, ptr: &a_attr); ix++) |
10199 | { |
10200 | dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)" , |
10201 | dwarf_attr_name (attr: a_attr->dw_attr)); |
10202 | output_value_format (a: a_attr); |
10203 | if (value_format (a: a_attr) == DW_FORM_implicit_const) |
10204 | { |
10205 | if (AT_class (a: a_attr) == dw_val_class_file_implicit) |
10206 | { |
10207 | int f = maybe_emit_file (fd: a_attr->dw_attr_val.v.val_file); |
10208 | const char *filename = a_attr->dw_attr_val.v.val_file->filename; |
10209 | dw2_asm_output_data_sleb128 (f, "(%s)" , filename); |
10210 | } |
10211 | else |
10212 | dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL); |
10213 | } |
10214 | } |
10215 | |
10216 | dw2_asm_output_data (1, 0, NULL); |
10217 | dw2_asm_output_data (1, 0, NULL); |
10218 | } |
10219 | |
10220 | |
10221 | /* Output the .debug_abbrev section which defines the DIE abbreviation |
10222 | table. */ |
10223 | |
10224 | static void |
10225 | output_abbrev_section (void) |
10226 | { |
10227 | unsigned int abbrev_id; |
10228 | dw_die_ref abbrev; |
10229 | |
10230 | FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev) |
10231 | if (abbrev_id != 0) |
10232 | output_die_abbrevs (abbrev_id, abbrev); |
10233 | |
10234 | /* Terminate the table. */ |
10235 | dw2_asm_output_data (1, 0, NULL); |
10236 | } |
10237 | |
10238 | /* Return a new location list, given the begin and end range, and the |
10239 | expression. */ |
10240 | |
10241 | static inline dw_loc_list_ref |
10242 | new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin, |
10243 | const char *end, var_loc_view vend, |
10244 | const char *section) |
10245 | { |
10246 | dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> (); |
10247 | |
10248 | retlist->begin = begin; |
10249 | retlist->begin_entry = NULL; |
10250 | retlist->end = end; |
10251 | retlist->end_entry = NULL; |
10252 | retlist->expr = expr; |
10253 | retlist->section = section; |
10254 | retlist->vbegin = vbegin; |
10255 | retlist->vend = vend; |
10256 | |
10257 | return retlist; |
10258 | } |
10259 | |
10260 | /* Return true iff there's any nonzero view number in the loc list. |
10261 | |
10262 | ??? When views are not enabled, we'll often extend a single range |
10263 | to the entire function, so that we emit a single location |
10264 | expression rather than a location list. With views, even with a |
10265 | single range, we'll output a list if start or end have a nonzero |
10266 | view. If we change this, we may want to stop splitting a single |
10267 | range in dw_loc_list just because of a nonzero view, even if it |
10268 | straddles across hot/cold partitions. */ |
10269 | |
10270 | static bool |
10271 | loc_list_has_views (dw_loc_list_ref list) |
10272 | { |
10273 | if (!debug_variable_location_views) |
10274 | return false; |
10275 | |
10276 | for (dw_loc_list_ref loc = list; |
10277 | loc != NULL; loc = loc->dw_loc_next) |
10278 | if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend)) |
10279 | return true; |
10280 | |
10281 | return false; |
10282 | } |
10283 | |
10284 | /* Generate a new internal symbol for this location list node, if it |
10285 | hasn't got one yet. */ |
10286 | |
10287 | static inline void |
10288 | gen_llsym (dw_loc_list_ref list) |
10289 | { |
10290 | gcc_assert (!list->ll_symbol); |
10291 | list->ll_symbol = gen_internal_sym (prefix: "LLST" ); |
10292 | |
10293 | if (!loc_list_has_views (list)) |
10294 | return; |
10295 | |
10296 | if (dwarf2out_locviews_in_attribute ()) |
10297 | { |
10298 | /* Use the same label_num for the view list. */ |
10299 | label_num--; |
10300 | list->vl_symbol = gen_internal_sym (prefix: "LVUS" ); |
10301 | } |
10302 | else |
10303 | list->vl_symbol = list->ll_symbol; |
10304 | } |
10305 | |
10306 | /* Generate a symbol for the list, but only if we really want to emit |
10307 | it as a list. */ |
10308 | |
10309 | static inline void |
10310 | maybe_gen_llsym (dw_loc_list_ref list) |
10311 | { |
10312 | if (!list || (!list->dw_loc_next && !loc_list_has_views (list))) |
10313 | return; |
10314 | |
10315 | gen_llsym (list); |
10316 | } |
10317 | |
10318 | /* Determine whether or not to skip loc_list entry CURR. If SIZEP is |
10319 | NULL, don't consider size of the location expression. If we're not |
10320 | to skip it, and SIZEP is non-null, store the size of CURR->expr's |
10321 | representation in *SIZEP. */ |
10322 | |
10323 | static bool |
10324 | skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL) |
10325 | { |
10326 | /* Don't output an entry that starts and ends at the same address. */ |
10327 | if (strcmp (s1: curr->begin, s2: curr->end) == 0 |
10328 | && curr->vbegin == curr->vend && !curr->force) |
10329 | return true; |
10330 | |
10331 | if (!sizep) |
10332 | return false; |
10333 | |
10334 | unsigned long size = size_of_locs (loc: curr->expr); |
10335 | |
10336 | /* If the expression is too large, drop it on the floor. We could |
10337 | perhaps put it into DW_TAG_dwarf_procedure and refer to that |
10338 | in the expression, but >= 64KB expressions for a single value |
10339 | in a single range are unlikely very useful. */ |
10340 | if (dwarf_version < 5 && size > 0xffff) |
10341 | return true; |
10342 | |
10343 | *sizep = size; |
10344 | |
10345 | return false; |
10346 | } |
10347 | |
/* Output a view pair loclist entry for CURR, if it requires one.  */

static void
dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
{
  /* View pairs are only emitted when views are represented inside the
     location list itself (rather than in a separate attribute).  */
  if (!dwarf2out_locviews_in_loclist ())
    return;

  /* Nothing to emit when both views are known to be zero.  */
  if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
    return;

#ifdef DW_LLE_view_pair
  dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");

  if (dwarf2out_as_locview_support)
    {
      /* The assembler resolves view numbers; emit symbolic references
	 to the LVU labels (or a literal 0 for a known-zero view).  */
      if (ZERO_VIEW_P (curr->vbegin))
	dw2_asm_output_data_uleb128 (0, "Location view begin");
      else
	{
	  char label[MAX_ARTIFICIAL_LABEL_BYTES];
	  ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
	  dw2_asm_output_symname_uleb128 (label, "Location view begin");
	}

      if (ZERO_VIEW_P (curr->vend))
	dw2_asm_output_data_uleb128 (0, "Location view end");
      else
	{
	  char label[MAX_ARTIFICIAL_LABEL_BYTES];
	  ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
	  dw2_asm_output_symname_uleb128 (label, "Location view end");
	}
    }
  else
    {
      /* View numbers are known at compile time; emit them directly.  */
      dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
      dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
    }
#endif /* DW_LLE_view_pair */

  return;
}
10391 | |
10392 | /* Output the location list given to us. */ |
10393 | |
10394 | static void |
10395 | output_loc_list (dw_loc_list_ref list_head) |
10396 | { |
10397 | int vcount = 0, lcount = 0; |
10398 | |
10399 | if (list_head->emitted) |
10400 | return; |
10401 | list_head->emitted = true; |
10402 | |
10403 | if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ()) |
10404 | { |
10405 | ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol); |
10406 | |
10407 | for (dw_loc_list_ref curr = list_head; curr != NULL; |
10408 | curr = curr->dw_loc_next) |
10409 | { |
10410 | unsigned long size; |
10411 | |
10412 | if (skip_loc_list_entry (curr, sizep: &size)) |
10413 | continue; |
10414 | |
10415 | vcount++; |
10416 | |
10417 | /* ?? dwarf_split_debug_info? */ |
10418 | if (dwarf2out_as_locview_support) |
10419 | { |
10420 | char label[MAX_ARTIFICIAL_LABEL_BYTES]; |
10421 | |
10422 | if (!ZERO_VIEW_P (curr->vbegin)) |
10423 | { |
10424 | ASM_GENERATE_INTERNAL_LABEL (label, "LVU" , curr->vbegin); |
10425 | dw2_asm_output_symname_uleb128 (label, |
10426 | "View list begin (%s)" , |
10427 | list_head->vl_symbol); |
10428 | } |
10429 | else |
10430 | dw2_asm_output_data_uleb128 (0, |
10431 | "View list begin (%s)" , |
10432 | list_head->vl_symbol); |
10433 | |
10434 | if (!ZERO_VIEW_P (curr->vend)) |
10435 | { |
10436 | ASM_GENERATE_INTERNAL_LABEL (label, "LVU" , curr->vend); |
10437 | dw2_asm_output_symname_uleb128 (label, |
10438 | "View list end (%s)" , |
10439 | list_head->vl_symbol); |
10440 | } |
10441 | else |
10442 | dw2_asm_output_data_uleb128 (0, |
10443 | "View list end (%s)" , |
10444 | list_head->vl_symbol); |
10445 | } |
10446 | else |
10447 | { |
10448 | dw2_asm_output_data_uleb128 (curr->vbegin, |
10449 | "View list begin (%s)" , |
10450 | list_head->vl_symbol); |
10451 | dw2_asm_output_data_uleb128 (curr->vend, |
10452 | "View list end (%s)" , |
10453 | list_head->vl_symbol); |
10454 | } |
10455 | } |
10456 | } |
10457 | |
10458 | ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol); |
10459 | |
10460 | const char *last_section = NULL; |
10461 | const char *base_label = NULL; |
10462 | |
10463 | /* Walk the location list, and output each range + expression. */ |
10464 | for (dw_loc_list_ref curr = list_head; curr != NULL; |
10465 | curr = curr->dw_loc_next) |
10466 | { |
10467 | unsigned long size; |
10468 | |
10469 | /* Skip this entry? If we skip it here, we must skip it in the |
10470 | view list above as well. */ |
10471 | if (skip_loc_list_entry (curr, sizep: &size)) |
10472 | continue; |
10473 | |
10474 | lcount++; |
10475 | |
10476 | if (dwarf_version >= 5) |
10477 | { |
10478 | if (dwarf_split_debug_info && HAVE_AS_LEB128) |
10479 | { |
10480 | dwarf2out_maybe_output_loclist_view_pair (curr); |
10481 | /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has |
10482 | uleb128 index into .debug_addr and uleb128 length. */ |
10483 | dw2_asm_output_data (1, DW_LLE_startx_length, |
10484 | "DW_LLE_startx_length (%s)" , |
10485 | list_head->ll_symbol); |
10486 | dw2_asm_output_data_uleb128 (curr->begin_entry->index, |
10487 | "Location list range start index " |
10488 | "(%s)" , curr->begin); |
10489 | dw2_asm_output_delta_uleb128 (curr->end, curr->begin, |
10490 | "Location list length (%s)" , |
10491 | list_head->ll_symbol); |
10492 | } |
10493 | else if (dwarf_split_debug_info) |
10494 | { |
10495 | dwarf2out_maybe_output_loclist_view_pair (curr); |
10496 | /* For -gsplit-dwarf without usable .uleb128 support, emit |
10497 | DW_LLE_startx_endx, which has two uleb128 indexes into |
10498 | .debug_addr. */ |
10499 | dw2_asm_output_data (1, DW_LLE_startx_endx, |
10500 | "DW_LLE_startx_endx (%s)" , |
10501 | list_head->ll_symbol); |
10502 | dw2_asm_output_data_uleb128 (curr->begin_entry->index, |
10503 | "Location list range start index " |
10504 | "(%s)" , curr->begin); |
10505 | dw2_asm_output_data_uleb128 (curr->end_entry->index, |
10506 | "Location list range end index " |
10507 | "(%s)" , curr->end); |
10508 | } |
10509 | else if (!have_multiple_function_sections && HAVE_AS_LEB128) |
10510 | { |
10511 | dwarf2out_maybe_output_loclist_view_pair (curr); |
10512 | /* If all code is in .text section, the base address is |
10513 | already provided by the CU attributes. Use |
10514 | DW_LLE_offset_pair where both addresses are uleb128 encoded |
10515 | offsets against that base. */ |
10516 | dw2_asm_output_data (1, DW_LLE_offset_pair, |
10517 | "DW_LLE_offset_pair (%s)" , |
10518 | list_head->ll_symbol); |
10519 | dw2_asm_output_delta_uleb128 (curr->begin, curr->section, |
10520 | "Location list begin address (%s)" , |
10521 | list_head->ll_symbol); |
10522 | dw2_asm_output_delta_uleb128 (curr->end, curr->section, |
10523 | "Location list end address (%s)" , |
10524 | list_head->ll_symbol); |
10525 | } |
10526 | else if (HAVE_AS_LEB128) |
10527 | { |
10528 | /* Otherwise, find out how many consecutive entries could share |
10529 | the same base entry. If just one, emit DW_LLE_start_length, |
10530 | otherwise emit DW_LLE_base_address for the base address |
10531 | followed by a series of DW_LLE_offset_pair. */ |
10532 | if (last_section == NULL || curr->section != last_section) |
10533 | { |
10534 | dw_loc_list_ref curr2; |
10535 | for (curr2 = curr->dw_loc_next; curr2 != NULL; |
10536 | curr2 = curr2->dw_loc_next) |
10537 | { |
10538 | if (strcmp (s1: curr2->begin, s2: curr2->end) == 0 |
10539 | && !curr2->force) |
10540 | continue; |
10541 | break; |
10542 | } |
10543 | if (curr2 == NULL || curr->section != curr2->section) |
10544 | last_section = NULL; |
10545 | else |
10546 | { |
10547 | last_section = curr->section; |
10548 | base_label = curr->begin; |
10549 | dw2_asm_output_data (1, DW_LLE_base_address, |
10550 | "DW_LLE_base_address (%s)" , |
10551 | list_head->ll_symbol); |
10552 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label, |
10553 | "Base address (%s)" , |
10554 | list_head->ll_symbol); |
10555 | } |
10556 | } |
10557 | /* Only one entry with the same base address. Use |
10558 | DW_LLE_start_length with absolute address and uleb128 |
10559 | length. */ |
10560 | if (last_section == NULL) |
10561 | { |
10562 | dwarf2out_maybe_output_loclist_view_pair (curr); |
10563 | dw2_asm_output_data (1, DW_LLE_start_length, |
10564 | "DW_LLE_start_length (%s)" , |
10565 | list_head->ll_symbol); |
10566 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin, |
10567 | "Location list begin address (%s)" , |
10568 | list_head->ll_symbol); |
10569 | dw2_asm_output_delta_uleb128 (curr->end, curr->begin, |
10570 | "Location list length " |
10571 | "(%s)" , list_head->ll_symbol); |
10572 | } |
10573 | /* Otherwise emit DW_LLE_offset_pair, relative to above emitted |
10574 | DW_LLE_base_address. */ |
10575 | else |
10576 | { |
10577 | dwarf2out_maybe_output_loclist_view_pair (curr); |
10578 | dw2_asm_output_data (1, DW_LLE_offset_pair, |
10579 | "DW_LLE_offset_pair (%s)" , |
10580 | list_head->ll_symbol); |
10581 | dw2_asm_output_delta_uleb128 (curr->begin, base_label, |
10582 | "Location list begin address " |
10583 | "(%s)" , list_head->ll_symbol); |
10584 | dw2_asm_output_delta_uleb128 (curr->end, base_label, |
10585 | "Location list end address " |
10586 | "(%s)" , list_head->ll_symbol); |
10587 | } |
10588 | } |
10589 | /* The assembler does not support .uleb128 directive. Emit |
10590 | DW_LLE_start_end with a pair of absolute addresses. */ |
10591 | else |
10592 | { |
10593 | dwarf2out_maybe_output_loclist_view_pair (curr); |
10594 | dw2_asm_output_data (1, DW_LLE_start_end, |
10595 | "DW_LLE_start_end (%s)" , |
10596 | list_head->ll_symbol); |
10597 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin, |
10598 | "Location list begin address (%s)" , |
10599 | list_head->ll_symbol); |
10600 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end, |
10601 | "Location list end address (%s)" , |
10602 | list_head->ll_symbol); |
10603 | } |
10604 | } |
10605 | else if (dwarf_split_debug_info) |
10606 | { |
10607 | /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr |
10608 | and 4 byte length. */ |
10609 | dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry, |
10610 | "Location list start/length entry (%s)" , |
10611 | list_head->ll_symbol); |
10612 | dw2_asm_output_data_uleb128 (curr->begin_entry->index, |
10613 | "Location list range start index (%s)" , |
10614 | curr->begin); |
10615 | /* The length field is 4 bytes. If we ever need to support |
10616 | an 8-byte length, we can add a new DW_LLE code or fall back |
10617 | to DW_LLE_GNU_start_end_entry. */ |
10618 | dw2_asm_output_delta (4, curr->end, curr->begin, |
10619 | "Location list range length (%s)" , |
10620 | list_head->ll_symbol); |
10621 | } |
10622 | else if (!have_multiple_function_sections) |
10623 | { |
10624 | /* Pair of relative addresses against start of text section. */ |
10625 | dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section, |
10626 | "Location list begin address (%s)" , |
10627 | list_head->ll_symbol); |
10628 | dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section, |
10629 | "Location list end address (%s)" , |
10630 | list_head->ll_symbol); |
10631 | } |
10632 | else |
10633 | { |
10634 | /* Pair of absolute addresses. */ |
10635 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin, |
10636 | "Location list begin address (%s)" , |
10637 | list_head->ll_symbol); |
10638 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end, |
10639 | "Location list end address (%s)" , |
10640 | list_head->ll_symbol); |
10641 | } |
10642 | |
10643 | /* Output the block length for this list of location operations. */ |
10644 | if (dwarf_version >= 5) |
10645 | dw2_asm_output_data_uleb128 (size, "Location expression size" ); |
10646 | else |
10647 | { |
10648 | gcc_assert (size <= 0xffff); |
10649 | dw2_asm_output_data (2, size, "Location expression size" ); |
10650 | } |
10651 | |
10652 | output_loc_sequence (loc: curr->expr, for_eh_or_skip: -1); |
10653 | } |
10654 | |
10655 | /* And finally list termination. */ |
10656 | if (dwarf_version >= 5) |
10657 | dw2_asm_output_data (1, DW_LLE_end_of_list, |
10658 | "DW_LLE_end_of_list (%s)" , list_head->ll_symbol); |
10659 | else if (dwarf_split_debug_info) |
10660 | dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry, |
10661 | "Location list terminator (%s)" , |
10662 | list_head->ll_symbol); |
10663 | else |
10664 | { |
10665 | dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, |
10666 | "Location list terminator begin (%s)" , |
10667 | list_head->ll_symbol); |
10668 | dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, |
10669 | "Location list terminator end (%s)" , |
10670 | list_head->ll_symbol); |
10671 | } |
10672 | |
10673 | gcc_assert (!list_head->vl_symbol |
10674 | || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0)); |
10675 | } |
10676 | |
/* Output a range_list offset into the .debug_ranges or .debug_rnglists
   section.  Emit a relocated reference if val_entry is NULL, otherwise,
   emit an indirect reference.  */

static void
output_range_list_offset (dw_attr_node *a)
{
  const char *name = dwarf_attr_name (attr: a->dw_attr);

  if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
    {
      if (dwarf_version >= 5)
	{
	  /* DWARF 5: relocated offset to the per-list label inside
	     .debug_rnglists.  */
	  dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
	  dw2_asm_output_offset (dwarf_offset_size, r->label,
				 debug_ranges_section, "%s", name);
	}
      else
	{
	  /* Pre-DWARF5: val_offset is an entry index; each entry in
	     .debug_ranges is an address pair, hence the
	     2 * DWARF2_ADDR_SIZE scaling.  Temporarily append
	     "+0x<offset>" to the section start label, emit the relocated
	     reference, then restore the label by putting the NUL
	     terminator back.  */
	  char *p = strchr (s: ranges_section_label, c: '\0');
	  sprintf (s: p, format: "+" HOST_WIDE_INT_PRINT_HEX,
		   a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
	  dw2_asm_output_offset (dwarf_offset_size, ranges_section_label,
				 debug_ranges_section, "%s", name);
	  *p = '\0';
	}
    }
  else if (dwarf_version >= 5)
    {
      /* Indirect reference (DW_FORM_rnglistx): a uleb128 index into the
	 .debug_rnglists offset table; indexes must have been assigned.  */
      dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
      gcc_assert (rnglist_idx);
      dw2_asm_output_data_uleb128 (r->idx, "%s", name);
    }
  else
    /* Pre-DWARF5 without relocation: emit the byte offset from the start
       of .debug_ranges as plain data.  */
    dw2_asm_output_data (dwarf_offset_size,
			 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
			 "%s (offset from %s)", name, ranges_section_label);
}
10715 | |
/* Output the offset into the debug_loc section.  */

static void
output_loc_list_offset (dw_attr_node *a)
{
  char *sym = AT_loc_list (a)->ll_symbol;

  gcc_assert (sym);
  if (!dwarf_split_debug_info)
    /* Non-split DWARF: a relocated offset into .debug_loc.  */
    dw2_asm_output_offset (dwarf_offset_size, sym, debug_loc_section,
			   "%s", dwarf_attr_name (attr: a->dw_attr));
  else if (dwarf_version >= 5)
    {
      /* Split DWARF 5: emit a uleb128 index into the location-list
	 offset table; the index ("hash" here) must have been assigned
	 beforehand.  */
      gcc_assert (AT_loc_list (a)->num_assigned);
      dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
				   dwarf_attr_name (attr: a->dw_attr),
				   sym);
    }
  else
    /* Split pre-v5: emit the offset as a delta from the section start
       label, which avoids a relocation in the .dwo file.  */
    dw2_asm_output_delta (dwarf_offset_size, sym, loc_section_label,
			  "%s", dwarf_attr_name (attr: a->dw_attr));
}
10738 | |
10739 | /* Output the offset into the debug_loc section. */ |
10740 | |
10741 | static void |
10742 | output_view_list_offset (dw_attr_node *a) |
10743 | { |
10744 | char *sym = (*AT_loc_list_ptr (a))->vl_symbol; |
10745 | |
10746 | gcc_assert (sym); |
10747 | if (dwarf_split_debug_info) |
10748 | dw2_asm_output_delta (dwarf_offset_size, sym, loc_section_label, |
10749 | "%s" , dwarf_attr_name (attr: a->dw_attr)); |
10750 | else |
10751 | dw2_asm_output_offset (dwarf_offset_size, sym, debug_loc_section, |
10752 | "%s" , dwarf_attr_name (attr: a->dw_attr)); |
10753 | } |
10754 | |
10755 | /* Output an attribute's index or value appropriately. */ |
10756 | |
10757 | static void |
10758 | output_attr_index_or_value (dw_attr_node *a) |
10759 | { |
10760 | const char *name = dwarf_attr_name (attr: a->dw_attr); |
10761 | |
10762 | if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED) |
10763 | { |
10764 | dw2_asm_output_data_uleb128 (AT_index (a), "%s" , name); |
10765 | return; |
10766 | } |
10767 | switch (AT_class (a)) |
10768 | { |
10769 | case dw_val_class_addr: |
10770 | dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s" , name); |
10771 | break; |
10772 | case dw_val_class_high_pc: |
10773 | case dw_val_class_lbl_id: |
10774 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s" , name); |
10775 | break; |
10776 | default: |
10777 | gcc_unreachable (); |
10778 | } |
10779 | } |
10780 | |
10781 | /* Output a type signature. */ |
10782 | |
10783 | static inline void |
10784 | output_signature (const char *sig, const char *name) |
10785 | { |
10786 | int i; |
10787 | |
10788 | for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++) |
10789 | dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name); |
10790 | } |
10791 | |
10792 | /* Output a discriminant value. */ |
10793 | |
10794 | static inline void |
10795 | output_discr_value (dw_discr_value *discr_value, const char *name) |
10796 | { |
10797 | if (discr_value->pos) |
10798 | dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s" , name); |
10799 | else |
10800 | dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s" , name); |
10801 | } |
10802 | |
/* Output the DIE and its attributes.  Called recursively to generate
   the definitions of each child DIE.  */

static void
output_die (dw_die_ref die)
{
  dw_attr_node *a;
  dw_die_ref c;
  unsigned long size;
  unsigned ix;

  /* Start the DIE with its abbreviation code; the comment carries the
     precomputed DIE offset to help when reading the generated asm.  */
  dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
			       (unsigned long)die->die_offset,
			       dwarf_tag_name (tag: die->die_tag));

  /* Emit each attribute value in the form implied by its value class.  */
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    {
      const char *name = dwarf_attr_name (attr: a->dw_attr);

      switch (AT_class (a))
	{
	case dw_val_class_addr:
	  output_attr_index_or_value (a);
	  break;

	case dw_val_class_offset:
	  dw2_asm_output_data (dwarf_offset_size, a->dw_attr_val.v.val_offset,
			       "%s", name);
	  break;

	case dw_val_class_range_list:
	  output_range_list_offset (a);
	  break;

	case dw_val_class_loc:
	  size = size_of_locs (loc: AT_loc (a));

	  /* Output the block length for this list of location operations.  */
	  if (dwarf_version >= 4)
	    dw2_asm_output_data_uleb128 (size, "%s", name);
	  else
	    dw2_asm_output_data (constant_size (value: size), size, "%s", name);

	  output_loc_sequence (loc: AT_loc (a), for_eh_or_skip: -1);
	  break;

	case dw_val_class_const:
	  /* ??? It would be slightly more efficient to use a scheme like is
	     used for unsigned constants below, but gdb 4.x does not sign
	     extend.  Gdb 5.x does sign extend.  */
	  dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
	  break;

	case dw_val_class_unsigned_const:
	  {
	    int csize = constant_size (value: AT_unsigned (a));
	    /* DWARF 3 requires DW_AT_data_member_location to use a
	       loc-class form for large values; emit uleb128 instead of a
	       wide fixed-size datum in that case.  */
	    if (dwarf_version == 3
		&& a->dw_attr == DW_AT_data_member_location
		&& csize >= 4)
	      dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
	    else
	      dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
	  }
	  break;

	case dw_val_class_symview:
	  {
	    /* Pick the smallest byte count that can hold every view
	       number (symview_upper_bound is the global maximum).  */
	    int vsize;
	    if (symview_upper_bound <= 0xff)
	      vsize = 1;
	    else if (symview_upper_bound <= 0xffff)
	      vsize = 2;
	    else if (symview_upper_bound <= 0xffffffff)
	      vsize = 4;
	    else
	      vsize = 8;
	    dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
				 "%s", name);
	  }
	  break;

	case dw_val_class_const_implicit:
	  /* Implicit constants live in the abbrev table, not in the DIE;
	     only emit an asm comment for human readers.  */
	  if (flag_debug_asm)
	    fprintf (stream: asm_out_file, format: "\t\t\t%s %s ("
		     HOST_WIDE_INT_PRINT_DEC ")\n",
		     ASM_COMMENT_START, name, AT_int (a));
	  break;

	case dw_val_class_unsigned_const_implicit:
	  if (flag_debug_asm)
	    fprintf (stream: asm_out_file, format: "\t\t\t%s %s ("
		     HOST_WIDE_INT_PRINT_HEX ")\n",
		     ASM_COMMENT_START, name, AT_unsigned (a));
	  break;

	case dw_val_class_const_double:
	  {
	    unsigned HOST_WIDE_INT first, second;

	    /* When the double-int is wider than the largest fixed-size
	       data form, it is emitted as a block: length byte first.  */
	    if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
	      dw2_asm_output_data (1,
				   HOST_BITS_PER_DOUBLE_INT
				   / HOST_BITS_PER_CHAR,
				   NULL);

	    /* Emit the two halves in target word order.  */
	    if (WORDS_BIG_ENDIAN)
	      {
		first = a->dw_attr_val.v.val_double.high;
		second = a->dw_attr_val.v.val_double.low;
	      }
	    else
	      {
		first = a->dw_attr_val.v.val_double.low;
		second = a->dw_attr_val.v.val_double.high;
	      }

	    dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
				 first, "%s", name);
	    dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
				 second, NULL);
	  }
	  break;

	case dw_val_class_wide_int:
	  {
	    int i;
	    int len = get_full_len (op: *a->dw_attr_val.v.val_wide);
	    int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
	    /* As above: oversized values become a block with a one-byte
	       length prefix (total bytes = elements * element size).  */
	    if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
	      dw2_asm_output_data (1, get_full_len (op: *a->dw_attr_val.v.val_wide)
				   * l, NULL);

	    /* Emit the elements in target word order; comment only the
	       first word.  */
	    if (WORDS_BIG_ENDIAN)
	      for (i = len - 1; i >= 0; --i)
		{
		  dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
				       "%s", name);
		  name = "";
		}
	    else
	      for (i = 0; i < len; ++i)
		{
		  dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
				       "%s", name);
		  name = "";
		}
	  }
	  break;

	case dw_val_class_vec:
	  {
	    unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
	    unsigned int len = a->dw_attr_val.v.val_vec.length;
	    unsigned int i;
	    unsigned char *p;

	    /* Block form: total byte length first, then raw elements.  */
	    dw2_asm_output_data (constant_size (value: len * elt_size),
				 len * elt_size, "%s", name);
	    /* Elements wider than HOST_WIDE_INT are split into halves so
	       extract_int can handle each piece.  */
	    if (elt_size > sizeof (HOST_WIDE_INT))
	      {
		elt_size /= 2;
		len *= 2;
	      }
	    for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
		 i < len;
		 i++, p += elt_size)
	      dw2_asm_output_data (elt_size, extract_int (p, elt_size),
				   "fp or vector constant word %u", i);
	    break;
	  }

	case dw_val_class_flag:
	  if (dwarf_version >= 4)
	    {
	      /* Currently all add_AT_flag calls pass in 1 as last argument,
		 so DW_FORM_flag_present can be used.  If that ever changes,
		 we'll need to use DW_FORM_flag and have some optimization
		 in build_abbrev_table that will change those to
		 DW_FORM_flag_present if it is set to 1 in all DIEs using
		 the same abbrev entry.  */
	      gcc_assert (AT_flag (a) == 1);
	      if (flag_debug_asm)
		fprintf (stream: asm_out_file, format: "\t\t\t%s %s\n",
			 ASM_COMMENT_START, name);
	      break;
	    }
	  dw2_asm_output_data (1, AT_flag (a), "%s", name);
	  break;

	case dw_val_class_loc_list:
	  output_loc_list_offset (a);
	  break;

	case dw_val_class_view_list:
	  output_view_list_offset (a);
	  break;

	case dw_val_class_die_ref:
	  if (AT_ref_external (a))
	    {
	      if (AT_ref (a)->comdat_type_p)
		{
		  /* Reference into a comdat type unit: emit the type
		     signature rather than a section offset.  */
		  comdat_type_node *type_node
		    = AT_ref (a)->die_id.die_type_node;

		  gcc_assert (type_node);
		  output_signature (sig: type_node->signature, name);
		}
	      else
		{
		  const char *sym = AT_ref (a)->die_id.die_symbol;
		  int size;

		  gcc_assert (sym);
		  /* In DWARF2, DW_FORM_ref_addr is sized by target address
		     length, whereas in DWARF3 it's always sized as an
		     offset.  */
		  if (dwarf_version == 2)
		    size = DWARF2_ADDR_SIZE;
		  else
		    size = dwarf_offset_size;
		  /* ??? We cannot unconditionally output die_offset if
		     non-zero - others might create references to those
		     DIEs via symbols.
		     And we do not clear its DIE offset after outputting it
		     (and the label refers to the actual DIEs, not the
		     DWARF CU unit header which is when using label + offset
		     would be the correct thing to do).
		     ??? This is the reason for the with_offset flag.  */
		  if (AT_ref (a)->with_offset)
		    dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
					   debug_info_section, "%s", name);
		  else
		    dw2_asm_output_offset (size, sym, debug_info_section, "%s",
					   name);
		}
	    }
	  else
	    {
	      /* Local reference: plain CU-relative DIE offset.  */
	      gcc_assert (AT_ref (a)->die_offset);
	      dw2_asm_output_data (dwarf_offset_size, AT_ref (a)->die_offset,
				   "%s", name);
	    }
	  break;

	case dw_val_class_fde_ref:
	  {
	    char l1[MAX_ARTIFICIAL_LABEL_BYTES];

	    /* FDE labels are numbered in pairs; reconstruct the label
	       from the stored index.  */
	    ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
					 a->dw_attr_val.v.val_fde_index * 2);
	    dw2_asm_output_offset (dwarf_offset_size, l1, debug_frame_section,
				   "%s", name);
	  }
	  break;

	case dw_val_class_vms_delta:
#ifdef ASM_OUTPUT_DWARF_VMS_DELTA
	  dw2_asm_output_vms_delta (dwarf_offset_size,
				    AT_vms_delta2 (a), AT_vms_delta1 (a),
				    "%s", name);
#else
	  dw2_asm_output_delta (dwarf_offset_size,
				AT_vms_delta2 (a), AT_vms_delta1 (a),
				"%s", name);
#endif
	  break;

	case dw_val_class_lbl_id:
	  output_attr_index_or_value (a);
	  break;

	case dw_val_class_lineptr:
	  dw2_asm_output_offset (dwarf_offset_size, AT_lbl (a),
				 debug_line_section, "%s", name);
	  break;

	case dw_val_class_macptr:
	  dw2_asm_output_offset (dwarf_offset_size, AT_lbl (a),
				 debug_macinfo_section, "%s", name);
	  break;

	case dw_val_class_loclistsptr:
	  dw2_asm_output_offset (dwarf_offset_size, AT_lbl (a),
				 debug_loc_section, "%s", name);
	  break;

	case dw_val_class_str:
	  /* Strings are emitted according to the form selected earlier:
	     an offset into .debug_str / .debug_line_str, an index
	     (DW_FORM_strx), or inline (DW_FORM_string).  */
	  if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
	    dw2_asm_output_offset (dwarf_offset_size,
				   a->dw_attr_val.v.val_str->label,
				   debug_str_section,
				   "%s: \"%s\"", name, AT_string (a));
	  else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
	    dw2_asm_output_offset (dwarf_offset_size,
				   a->dw_attr_val.v.val_str->label,
				   debug_line_str_section,
				   "%s: \"%s\"", name, AT_string (a));
	  else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (form: DW_FORM_strx))
	    dw2_asm_output_data_uleb128 (AT_index (a),
					 "%s: \"%s\"", name, AT_string (a));
	  else
	    dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
	  break;

	case dw_val_class_file:
	  {
	    /* Make sure the file has a number in the line table, then
	       emit that number.  */
	    int f = maybe_emit_file (fd: a->dw_attr_val.v.val_file);

	    dw2_asm_output_data (constant_size (value: f), f, "%s (%s)", name,
				 a->dw_attr_val.v.val_file->filename);
	    break;
	  }

	case dw_val_class_file_implicit:
	  /* Value lives in the abbrev table; comment only.  */
	  if (flag_debug_asm)
	    fprintf (stream: asm_out_file, format: "\t\t\t%s %s (%d, %s)\n",
		     ASM_COMMENT_START, name,
		     maybe_emit_file (fd: a->dw_attr_val.v.val_file),
		     a->dw_attr_val.v.val_file->filename);
	  break;

	case dw_val_class_data8:
	  {
	    int i;

	    /* Eight raw bytes; comment only the first.  */
	    for (i = 0; i < 8; i++)
	      dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
				   i == 0 ? "%s" : NULL, name);
	    break;
	  }

	case dw_val_class_high_pc:
	  /* DW_AT_high_pc as an offset from the DIE's low pc.  */
	  dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
				get_AT_low_pc (die), "DW_AT_high_pc");
	  break;

	case dw_val_class_discr_value:
	  output_discr_value (discr_value: &a->dw_attr_val.v.val_discr_value, name);
	  break;

	case dw_val_class_discr_list:
	  {
	    dw_discr_list_ref list = AT_discr_list (a);
	    const int size = size_of_discr_list (discr_list: list);

	    /* This is a block, so output its length first.  */
	    dw2_asm_output_data (constant_size (value: size), size,
				 "%s: block size", name);

	    for (; list != NULL; list = list->dw_discr_next)
	      {
		/* One byte for the discriminant value descriptor, and then as
		   many LEB128 numbers as required.  */
		if (list->dw_discr_range)
		  dw2_asm_output_data (1, DW_DSC_range,
				       "%s: DW_DSC_range", name);
		else
		  dw2_asm_output_data (1, DW_DSC_label,
				       "%s: DW_DSC_label", name);

		output_discr_value (discr_value: &list->dw_discr_lower_bound, name);
		if (list->dw_discr_range)
		  output_discr_value (discr_value: &list->dw_discr_upper_bound, name);
	      }
	    break;
	  }

	default:
	  gcc_unreachable ();
	}
    }

  /* Recurse into the children, in order.  */
  FOR_EACH_CHILD (die, c, output_die (c));

  /* Add null byte to terminate sibling list.  */
  if (die->die_child != NULL)
    dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
			 (unsigned long) die->die_offset);
}
11183 | |
11184 | /* Output the dwarf version number. */ |
11185 | |
11186 | static void |
11187 | output_dwarf_version () |
11188 | { |
11189 | /* ??? For now, if -gdwarf-6 is specified, we output version 5 with |
11190 | views in loclist. That will change eventually. */ |
11191 | if (dwarf_version == 6) |
11192 | { |
11193 | static bool once; |
11194 | if (!once) |
11195 | { |
11196 | warning (0, "%<-gdwarf-6%> is output as version 5 with " |
11197 | "incompatibilities" ); |
11198 | once = true; |
11199 | } |
11200 | dw2_asm_output_data (2, 5, "DWARF version number" ); |
11201 | } |
11202 | else |
11203 | dw2_asm_output_data (2, dwarf_version, "DWARF version number" ); |
11204 | } |
11205 | |
11206 | /* Output the compilation unit that appears at the beginning of the |
11207 | .debug_info section, and precedes the DIE descriptions. */ |
11208 | |
11209 | static void |
11210 | (enum dwarf_unit_type ut) |
11211 | { |
11212 | if (!XCOFF_DEBUGGING_INFO) |
11213 | { |
11214 | if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4) |
11215 | dw2_asm_output_data (4, 0xffffffff, |
11216 | "Initial length escape value indicating 64-bit DWARF extension" ); |
11217 | dw2_asm_output_data (dwarf_offset_size, |
11218 | next_die_offset - DWARF_INITIAL_LENGTH_SIZE, |
11219 | "Length of Compilation Unit Info" ); |
11220 | } |
11221 | |
11222 | output_dwarf_version (); |
11223 | if (dwarf_version >= 5) |
11224 | { |
11225 | const char *name; |
11226 | switch (ut) |
11227 | { |
11228 | case DW_UT_compile: name = "DW_UT_compile" ; break; |
11229 | case DW_UT_type: name = "DW_UT_type" ; break; |
11230 | case DW_UT_split_compile: name = "DW_UT_split_compile" ; break; |
11231 | case DW_UT_split_type: name = "DW_UT_split_type" ; break; |
11232 | default: gcc_unreachable (); |
11233 | } |
11234 | dw2_asm_output_data (1, ut, "%s" , name); |
11235 | dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)" ); |
11236 | } |
11237 | dw2_asm_output_offset (dwarf_offset_size, abbrev_section_label, |
11238 | debug_abbrev_section, |
11239 | "Offset Into Abbrev. Section" ); |
11240 | if (dwarf_version < 5) |
11241 | dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)" ); |
11242 | } |
11243 | |
/* Output the compilation unit DIE and its children.  DWO_ID, when
   non-NULL, is the 8-byte split-DWARF unit id and selects the skeleton
   header layout; OUTPUT_IF_EMPTY forces output even for a childless
   DIE (used for the main CU).  */

static void
output_comp_unit (dw_die_ref die, int output_if_empty,
		  const unsigned char *dwo_id)
{
  const char *secname, *oldsym;
  char *tmp;

  /* Unless we are outputting main CU, we may throw away empty ones.  */
  if (!output_if_empty && die->die_child == NULL)
    return;

  /* Even if there are no children of this DIE, we must output the information
     about the compilation unit.  Otherwise, on an empty translation unit, we
     will generate a present, but empty, .debug_info section.  IRIX 6.5 `nm'
     will then complain when examining the file.  First mark all the DIEs in
     this CU so we know which get local refs.  */
  mark_dies (die);

  external_ref_hash_type *extern_map = optimize_external_refs (die);

  /* For now, optimize only the main CU, in order to optimize the rest
     we'd need to see all of them earlier.  Leave the rest for post-linking
     tools like DWZ.  */
  if (die == comp_unit_die ())
    abbrev_opt_start = vec_safe_length (v: abbrev_die_table);

  build_abbrev_table (die, extern_map);

  optimize_abbrev_table ();

  delete extern_map;

  /* Initialize the beginning DIE offset - and calculate sizes/offsets.  */
  next_die_offset = (dwo_id
		     ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
		     : DWARF_COMPILE_UNIT_HEADER_SIZE);
  calc_die_sizes (die);

  oldsym = die->die_id.die_symbol;
  if (oldsym && die->comdat_type_p)
    {
      /* COMDAT CU: place it in its own linkonce section named after the
	 symbol, and drop the symbol while emitting so local refs are
	 used inside the unit.  */
      tmp = XALLOCAVEC (char, strlen (oldsym) + 24);

      sprintf (s: tmp, format: ".gnu.linkonce.wi.%s", oldsym);
      secname = tmp;
      die->die_id.die_symbol = NULL;
      switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
    }
  else
    {
      switch_to_section (debug_info_section);
      ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
      info_section_emitted = true;
    }

  /* For LTO cross unit DIE refs we want a symbol on the start of the
     debuginfo section, not on the CU DIE.  */
  if ((flag_generate_lto || flag_generate_offload) && oldsym)
    {
      /* ??? No way to get visibility assembled without a decl.  */
      tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			      get_identifier (oldsym), char_type_node);
      TREE_PUBLIC (decl) = true;
      TREE_STATIC (decl) = true;
      DECL_ARTIFICIAL (decl) = true;
      DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
      DECL_VISIBILITY_SPECIFIED (decl) = true;
      targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
#ifdef ASM_WEAKEN_LABEL
      /* We prefer a .weak because that handles duplicates from duplicate
	 archive members in a graceful way.  */
      ASM_WEAKEN_LABEL (asm_out_file, oldsym);
#else
      targetm.asm_out.globalize_label (asm_out_file, oldsym);
#endif
      ASM_OUTPUT_LABEL (asm_out_file, oldsym);
    }

  /* Output debugging information.  */
  output_compilation_unit_header (ut: dwo_id
				  ? DW_UT_split_compile : DW_UT_compile);
  /* DWARF 5 split units carry the 8-byte DWO id right after the header.  */
  if (dwarf_version >= 5)
    {
      if (dwo_id != NULL)
	for (int i = 0; i < 8; i++)
	  dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
    }
  output_die (die);

  /* Leave the marks on the main CU, so we can check them in
     output_pubnames.  */
  if (oldsym)
    {
      unmark_dies (die);
      die->die_id.die_symbol = oldsym;
    }
}
11343 | |
11344 | /* Whether to generate the DWARF accelerator tables in .debug_pubnames |
11345 | and .debug_pubtypes. This is configured per-target, but can be |
11346 | overridden by the -gpubnames or -gno-pubnames options. */ |
11347 | |
11348 | static inline bool |
11349 | want_pubnames (void) |
11350 | { |
11351 | if (debug_info_level <= DINFO_LEVEL_TERSE |
11352 | /* Names and types go to the early debug part only. */ |
11353 | || in_lto_p) |
11354 | return false; |
11355 | if (debug_generate_pub_sections != -1) |
11356 | return debug_generate_pub_sections; |
11357 | return targetm.want_debug_pub_sections; |
11358 | } |
11359 | |
11360 | /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */ |
11361 | |
11362 | static void |
11363 | add_AT_pubnames (dw_die_ref die) |
11364 | { |
11365 | if (want_pubnames ()) |
11366 | add_AT_flag (die, attr_kind: DW_AT_GNU_pubnames, flag: 1); |
11367 | } |
11368 | |
11369 | /* Add a string attribute value to a skeleton DIE. */ |
11370 | |
11371 | static inline void |
11372 | add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, |
11373 | const char *str) |
11374 | { |
11375 | dw_attr_node attr; |
11376 | struct indirect_string_node *node; |
11377 | |
11378 | if (! skeleton_debug_str_hash) |
11379 | skeleton_debug_str_hash |
11380 | = hash_table<indirect_string_hasher>::create_ggc (n: 10); |
11381 | |
11382 | node = find_AT_string_in_table (str, table: skeleton_debug_str_hash); |
11383 | find_string_form (node); |
11384 | if (node->form == dwarf_FORM (form: DW_FORM_strx)) |
11385 | node->form = DW_FORM_strp; |
11386 | |
11387 | attr.dw_attr = attr_kind; |
11388 | attr.dw_attr_val.val_class = dw_val_class_str; |
11389 | attr.dw_attr_val.val_entry = NULL; |
11390 | attr.dw_attr_val.v.val_str = node; |
11391 | add_dwarf_attr (die, attr: &attr); |
11392 | } |
11393 | |
11394 | /* Helper function to generate top-level dies for skeleton debug_info and |
11395 | debug_types. */ |
11396 | |
11397 | static void |
11398 | add_top_level_skeleton_die_attrs (dw_die_ref die) |
11399 | { |
11400 | const char *dwo_file_name = concat (aux_base_name, ".dwo" , NULL); |
11401 | const char *comp_dir = comp_dir_string (); |
11402 | |
11403 | add_skeleton_AT_string (die, attr_kind: dwarf_AT (at: DW_AT_dwo_name), str: dwo_file_name); |
11404 | if (comp_dir != NULL) |
11405 | add_skeleton_AT_string (die, attr_kind: DW_AT_comp_dir, str: comp_dir); |
11406 | add_AT_pubnames (die); |
11407 | if (addr_index_table != NULL && addr_index_table->size () > 0) |
11408 | add_AT_lineptr (die, attr_kind: dwarf_AT (at: DW_AT_addr_base), label: debug_addr_section_label); |
11409 | } |
11410 | |
/* Output skeleton debug sections that point to the dwo file.  COMP_UNIT
   is the skeleton CU DIE; DWO_ID is the 8-byte split-DWARF unit id
   emitted in the DWARF 5 skeleton header.  */

static void
output_skeleton_debug_sections (dw_die_ref comp_unit,
				const unsigned char *dwo_id)
{
  /* These attributes will be found in the full debug_info section.  */
  remove_AT (die: comp_unit, attr_kind: DW_AT_producer);
  remove_AT (die: comp_unit, attr_kind: DW_AT_language);

  switch_to_section (debug_skeleton_info_section);
  ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);

  /* Produce the skeleton compilation-unit header.  This one differs enough from
     a normal CU header that it's better not to call output_compilation_unit
     header.  */
  if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
    dw2_asm_output_data (4, 0xffffffff,
			 "Initial length escape value indicating 64-bit "
			 "DWARF extension");

  /* The length covers the fixed skeleton header plus the single CU DIE.  */
  dw2_asm_output_data (dwarf_offset_size,
		       DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
		       - DWARF_INITIAL_LENGTH_SIZE
		       + size_of_die (die: comp_unit),
		       "Length of Compilation Unit Info");
  output_dwarf_version ();
  if (dwarf_version >= 5)
    {
      dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
      dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
    }
  dw2_asm_output_offset (dwarf_offset_size, debug_skeleton_abbrev_section_label,
			 debug_skeleton_abbrev_section,
			 "Offset Into Abbrev. Section");
  /* Pre-v5: address size trails the abbrev offset; v5: the DWO id does.  */
  if (dwarf_version < 5)
    dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
  else
    for (int i = 0; i < 8; i++)
      dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);

  /* The skeleton CU always uses the one fixed abbrev code.  */
  comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
  output_die (die: comp_unit);

  /* Build the skeleton debug_abbrev section.  */
  switch_to_section (debug_skeleton_abbrev_section);
  ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);

  output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, abbrev: comp_unit);

  dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
}
11463 | |
/* Output a comdat type unit DIE and its children.  A comdat type unit
   carries a single type (plus context and children) in a COMDAT-style
   linkonce section keyed by the type signature, so the linker can
   discard duplicate copies emitted from other translation units.
   EARLY_LTO_DEBUG selects the .gnu.debuglto_ section variants (only
   used on ELF targets; hence ATTRIBUTE_UNUSED).  */

static void
output_comdat_type_unit (comdat_type_node *node,
			 bool early_lto_debug ATTRIBUTE_UNUSED)
{
  const char *secname;
  char *tmp;
  int i;
#if defined (OBJECT_FORMAT_ELF)
  tree comdat_key;
#endif

  /* First mark all the DIEs in this CU so we know which get local refs.  */
  mark_dies (die: node->root_die);

  external_ref_hash_type *extern_map = optimize_external_refs (die: node->root_die);

  build_abbrev_table (die: node->root_die, extern_map);

  /* The abbrev table has been built; the external-ref map is no longer
     needed.  */
  delete extern_map;
  extern_map = NULL;

  /* Initialize the beginning DIE offset - and calculate sizes/offsets.  */
  next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
  calc_die_sizes (die: node->root_die);

#if defined (OBJECT_FORMAT_ELF)
  /* On ELF, pick the target section: DWARF 5 folds type units into
     .debug_info (or its .dwo / LTO variants); earlier versions use a
     separate .debug_types section.  */
  if (dwarf_version >= 5)
    {
      if (!dwarf_split_debug_info)
	secname = early_lto_debug ? DEBUG_LTO_INFO_SECTION : DEBUG_INFO_SECTION;
      else
	secname = (early_lto_debug
		   ? DEBUG_LTO_DWO_INFO_SECTION : DEBUG_DWO_INFO_SECTION);
    }
  else if (!dwarf_split_debug_info)
    secname = early_lto_debug ? ".gnu.debuglto_.debug_types" : ".debug_types" ;
  else
    secname = (early_lto_debug
	       ? ".gnu.debuglto_.debug_types.dwo" : ".debug_types.dwo" );

  /* Build the COMDAT key: "wi." (DWARF 5) or "wt." followed by the type
     signature rendered as lowercase hex.  */
  tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
  sprintf (s: tmp, dwarf_version >= 5 ? "wi." : "wt." );
  for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
    sprintf (s: tmp + 3 + i * 2, format: "%02x" , node->signature[i] & 0xff);
  comdat_key = get_identifier (tmp);
  targetm.asm_out.named_section (secname,
				 SECTION_DEBUG | SECTION_LINKONCE,
				 comdat_key);
#else
  /* Non-ELF: encode the signature directly in a .gnu.linkonce-prefixed
     section name instead of using a COMDAT key.  */
  tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
  sprintf (tmp, (dwarf_version >= 5
		 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt." ));
  for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
    sprintf (tmp + 17 + i * 2, "%02x" , node->signature[i] & 0xff);
  secname = tmp;
  switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
#endif

  /* Output debugging information: unit header, signature, offset of the
     type DIE within the unit, then the DIE tree itself.  */
  output_compilation_unit_header (dwarf_split_debug_info
				  ? DW_UT_split_type : DW_UT_type);
  output_signature (sig: node->signature, name: "Type Signature" );
  dw2_asm_output_data (dwarf_offset_size, node->type_die->die_offset,
		       "Offset to Type DIE" );
  output_die (die: node->root_die);

  unmark_dies (die: node->root_die);
}
11534 | |
11535 | /* Return the DWARF2/3 pubname associated with a decl. */ |
11536 | |
11537 | static const char * |
11538 | dwarf2_name (tree decl, int scope) |
11539 | { |
11540 | if (DECL_NAMELESS (decl)) |
11541 | return NULL; |
11542 | return lang_hooks.dwarf_name (decl, scope ? 1 : 0); |
11543 | } |
11544 | |
11545 | /* Add a new entry to .debug_pubnames if appropriate. */ |
11546 | |
11547 | static void |
11548 | add_pubname_string (const char *str, dw_die_ref die) |
11549 | { |
11550 | pubname_entry e; |
11551 | |
11552 | e.die = die; |
11553 | e.name = xstrdup (str); |
11554 | vec_safe_push (v&: pubname_table, obj: e); |
11555 | } |
11556 | |
11557 | static void |
11558 | add_pubname (tree decl, dw_die_ref die) |
11559 | { |
11560 | if (!want_pubnames ()) |
11561 | return; |
11562 | |
11563 | /* Don't add items to the table when we expect that the consumer will have |
11564 | just read the enclosing die. For example, if the consumer is looking at a |
11565 | class_member, it will either be inside the class already, or will have just |
11566 | looked up the class to find the member. Either way, searching the class is |
11567 | faster than searching the index. */ |
11568 | if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent)) |
11569 | || is_cu_die (c: die->die_parent) || is_namespace_die (c: die->die_parent)) |
11570 | { |
11571 | const char *name = dwarf2_name (decl, scope: 1); |
11572 | |
11573 | if (name) |
11574 | add_pubname_string (str: name, die); |
11575 | } |
11576 | } |
11577 | |
11578 | /* Add an enumerator to the pubnames section. */ |
11579 | |
11580 | static void |
11581 | add_enumerator_pubname (const char *scope_name, dw_die_ref die) |
11582 | { |
11583 | pubname_entry e; |
11584 | |
11585 | gcc_assert (scope_name); |
11586 | e.name = concat (scope_name, get_AT_string (die, attr_kind: DW_AT_name), NULL); |
11587 | e.die = die; |
11588 | vec_safe_push (v&: pubname_table, obj: e); |
11589 | } |
11590 | |
/* Add a new entry to .debug_pubtypes if appropriate.  DECL is the type
   (or typedef decl) and DIE its debug entry.  The recorded name is
   prefixed with its enclosing namespace when there is one.  */

static void
add_pubtype (tree decl, dw_die_ref die)
{
  pubname_entry e;

  if (!want_pubnames ())
    return;

  /* Only record public types, or types directly at CU/namespace scope;
     and only once the type is complete -- except typedefs, whose target
     may legitimately be incomplete.  */
  if ((TREE_PUBLIC (decl)
       || is_cu_die (c: die->die_parent) || is_namespace_die (c: die->die_parent))
      && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
    {
      tree scope = NULL;
      const char *scope_name = "" ;
      /* Scope separator: "::" for C++, "." for other languages.  */
      const char *sep = is_cxx () ? "::" : "." ;
      const char *name;

      /* Compute the namespace prefix, if the type lives in one.  */
      scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
      if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
	{
	  scope_name = lang_hooks.dwarf_name (scope, 1);
	  if (scope_name != NULL && scope_name[0] != '\0')
	    scope_name = concat (scope_name, sep, NULL);
	  else
	    scope_name = "" ;
	}

      if (TYPE_P (decl))
	name = type_tag (decl);
      else
	name = lang_hooks.dwarf_name (decl, 1);

      /* If we don't have a name for the type, there's no point in adding
	 it to the table.  */
      if (name != NULL && name[0] != '\0')
	{
	  e.die = die;
	  e.name = concat (scope_name, name, NULL);
	  vec_safe_push (v&: pubtype_table, obj: e);
	}

      /* Although it might be more consistent to add the pubinfo for the
	 enumerators as their dies are created, they should only be added if the
	 enum type meets the criteria above.  So rather than re-check the parent
	 enum type whenever an enumerator die is created, just output them all
	 here.  This isn't protected by the name conditional because anonymous
	 enums don't have names.  */
      if (die->die_tag == DW_TAG_enumeration_type)
	{
	  dw_die_ref c;

	  FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
	}
    }
}
11648 | |
/* Output a single entry in the pubnames table: the CU-relative
   DIE_OFFSET, an optional GDB-index flag byte (only for
   -ggnu-pubnames, i.e. debug_generate_pub_sections == 2), and the
   NUL-terminated name from ENTRY.  */

static void
output_pubname (dw_offset die_offset, pubname_entry *entry)
{
  dw_die_ref die = entry->die;
  /* A DIE without DW_AT_external is treated as static (not externally
     visible).  */
  int is_static = get_AT_flag (die, attr_kind: DW_AT_external) ? 0 : 1;

  dw2_asm_output_data (dwarf_offset_size, die_offset, "DIE offset" );

  if (debug_generate_pub_sections == 2)
    {
      /* This logic follows gdb's method for determining the value of the flag
	 byte.  */
      uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
      switch (die->die_tag)
	{
	case DW_TAG_typedef:
	case DW_TAG_base_type:
	case DW_TAG_subrange_type:
	  GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
	  GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
	  break;
	case DW_TAG_enumerator:
	  GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
					  GDB_INDEX_SYMBOL_KIND_VARIABLE);
	  /* In C++, enumerators are visible via their enclosing scope,
	     so they are not marked static there.  */
	  if (!is_cxx ())
	    GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
	  break;
	case DW_TAG_subprogram:
	  GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
					  GDB_INDEX_SYMBOL_KIND_FUNCTION);
	  if (!is_ada ())
	    GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
	  break;
	case DW_TAG_constant:
	  GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
					  GDB_INDEX_SYMBOL_KIND_VARIABLE);
	  GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
	  break;
	case DW_TAG_variable:
	  GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
					  GDB_INDEX_SYMBOL_KIND_VARIABLE);
	  GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
	  break;
	case DW_TAG_namespace:
	case DW_TAG_imported_declaration:
	  GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
	  break;
	case DW_TAG_class_type:
	case DW_TAG_interface_type:
	case DW_TAG_structure_type:
	case DW_TAG_union_type:
	case DW_TAG_enumeration_type:
	  GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
	  if (!is_cxx ())
	    GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
	  break;
	default:
	  /* An unusual tag.  Leave the flag-byte empty.  */
	  break;
	}
      /* Only the bits above the CU-index field are emitted here; the
	 CU index itself is supplied by the index writer.  */
      dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
			   "GDB-index flags" );
    }

  dw2_asm_output_nstring (entry->name, -1, "external name" );
}
11717 | |
11718 | |
/* Output the public names table used to speed up access to externally
   visible names; or the public types table used to find type definitions.
   NAMES is either pubname_table or pubtype_table.  Emits the section
   header (length, version, CU offset/length) followed by one entry per
   includable name and a terminating zero offset.  */

static void
output_pubnames (vec<pubname_entry, va_gc> *names)
{
  unsigned i;
  unsigned long pubnames_length = size_of_pubnames (names);
  pubname_entry *pub;

  /* XCOFF supplies the unit length itself, so skip the initial-length
     field there.  */
  if (!XCOFF_DEBUGGING_INFO)
    {
      if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
	dw2_asm_output_data (4, 0xffffffff,
	  "Initial length escape value indicating 64-bit DWARF extension" );
      dw2_asm_output_data (dwarf_offset_size, pubnames_length,
			   "Pub Info Length" );
    }

  /* Version number for pubnames/pubtypes is independent of dwarf version.  */
  dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version" );

  /* With -gsplit-dwarf the tables refer to the skeleton CU in the main
     object file rather than the .dwo CU.  */
  if (dwarf_split_debug_info)
    dw2_asm_output_offset (dwarf_offset_size, debug_skeleton_info_section_label,
			   debug_skeleton_info_section,
			   "Offset of Compilation Unit Info" );
  else
    dw2_asm_output_offset (dwarf_offset_size, debug_info_section_label,
			   debug_info_section,
			   "Offset of Compilation Unit Info" );
  dw2_asm_output_data (dwarf_offset_size, next_die_offset,
		       "Compilation Unit Length" );

  FOR_EACH_VEC_ELT (*names, i, pub)
    {
      if (include_pubname_in_output (table: names, p: pub))
	{
	  dw_offset die_offset = pub->die->die_offset;

	  /* We shouldn't see pubnames for DIEs outside of the main CU.  */
	  if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
	    gcc_assert (pub->die->die_mark);

	  /* If we're putting types in their own .debug_types sections,
	     the .debug_pubtypes table will still point to the compile
	     unit (not the type unit), so we want to use the offset of
	     the skeleton DIE (if there is one).  */
	  if (pub->die->comdat_type_p && names == pubtype_table)
	    {
	      comdat_type_node *type_node = pub->die->die_id.die_type_node;

	      if (type_node != NULL)
		die_offset = (type_node->skeleton_die != NULL
			      ? type_node->skeleton_die->die_offset
			      : comp_unit_die ()->die_offset);
	    }

	  output_pubname (die_offset, entry: pub);
	}
    }

  /* Terminator: a zero DIE offset marks the end of the table.  */
  dw2_asm_output_data (dwarf_offset_size, 0, NULL);
}
11782 | |
11783 | /* Output public names and types tables if necessary. */ |
11784 | |
11785 | static void |
11786 | output_pubtables (void) |
11787 | { |
11788 | if (!want_pubnames () || !info_section_emitted) |
11789 | return; |
11790 | |
11791 | switch_to_section (debug_pubnames_section); |
11792 | output_pubnames (names: pubname_table); |
11793 | /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2. |
11794 | It shouldn't hurt to emit it always, since pure DWARF2 consumers |
11795 | simply won't look for the section. */ |
11796 | switch_to_section (debug_pubtypes_section); |
11797 | output_pubnames (names: pubtype_table); |
11798 | } |
11799 | |
11800 | |
/* Output the information that goes into the .debug_aranges table.
   Namely, define the beginning and ending address range of the
   text section generated for this compilation unit.  The table format
   is: header, 2*address-size alignment padding, a list of
   (address, length) pairs, and a (0, 0) terminator pair.  */

static void
output_aranges (void)
{
  unsigned i;
  unsigned long aranges_length = size_of_aranges ();

  /* XCOFF supplies the unit length itself, so skip the initial-length
     field there.  */
  if (!XCOFF_DEBUGGING_INFO)
    {
      if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
	dw2_asm_output_data (4, 0xffffffff,
	  "Initial length escape value indicating 64-bit DWARF extension" );
      dw2_asm_output_data (dwarf_offset_size, aranges_length,
			   "Length of Address Ranges Info" );
    }

  /* Version number for aranges is still 2, even up to DWARF5.  */
  dw2_asm_output_data (2, 2, "DWARF aranges version" );
  /* With -gsplit-dwarf the table refers to the skeleton CU in the main
     object file rather than the .dwo CU.  */
  if (dwarf_split_debug_info)
    dw2_asm_output_offset (dwarf_offset_size, debug_skeleton_info_section_label,
			   debug_skeleton_info_section,
			   "Offset of Compilation Unit Info" );
  else
    dw2_asm_output_offset (dwarf_offset_size, debug_info_section_label,
			   debug_info_section,
			   "Offset of Compilation Unit Info" );
  dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address" );
  dw2_asm_output_data (1, 0, "Size of Segment Descriptor" );

  /* We need to align to twice the pointer size here.  */
  if (DWARF_ARANGES_PAD_SIZE)
    {
      /* Pad using a 2 byte words so that padding is correct for any
	 pointer size.  */
      dw2_asm_output_data (2, 0, "Pad to %d byte boundary" ,
			   2 * DWARF2_ADDR_SIZE);
      for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
	dw2_asm_output_data (2, 0, NULL);
    }

  /* It is necessary not to output these entries if the sections were
     not used; if the sections were not used, the length will be 0 and
     the address may end up as 0 if the section is discarded by ld
     --gc-sections, leaving an invalid (0, 0) entry that can be
     confused with the terminator.  */
  if (switch_text_ranges)
    {
      /* switch_text_ranges holds alternating end/begin labels of holes
	 in the text section; pair them up into (address, length)
	 entries, starting from the text section start label.  */
      const char *prev_loc = text_section_label;
      const char *loc;
      unsigned idx;

      FOR_EACH_VEC_ELT (*switch_text_ranges, idx, loc)
	if (prev_loc)
	  {
	    dw2_asm_output_addr (DWARF2_ADDR_SIZE, prev_loc, "Address" );
	    dw2_asm_output_delta (DWARF2_ADDR_SIZE, loc, prev_loc, "Length" );
	    prev_loc = NULL;
	  }
	else
	  prev_loc = loc;

      /* Close the final open range at the text section end label.  */
      if (prev_loc)
	{
	  dw2_asm_output_addr (DWARF2_ADDR_SIZE, prev_loc, "Address" );
	  dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
				prev_loc, "Length" );
	}
    }

  if (switch_cold_ranges)
    {
      /* Same pairing scheme for the cold ("unlikely") text section.  */
      const char *prev_loc = cold_text_section_label;
      const char *loc;
      unsigned idx;

      FOR_EACH_VEC_ELT (*switch_cold_ranges, idx, loc)
	if (prev_loc)
	  {
	    dw2_asm_output_addr (DWARF2_ADDR_SIZE, prev_loc, "Address" );
	    dw2_asm_output_delta (DWARF2_ADDR_SIZE, loc, prev_loc, "Length" );
	    prev_loc = NULL;
	  }
	else
	  prev_loc = loc;

      if (prev_loc)
	{
	  dw2_asm_output_addr (DWARF2_ADDR_SIZE, prev_loc, "Address" );
	  dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
				prev_loc, "Length" );
	}
    }

  if (have_multiple_function_sections)
    {
      /* Emit one (address, length) pair per FDE that lives outside the
	 standard text/cold sections; an FDE split across sections
	 contributes a second pair.  */
      unsigned fde_idx;
      dw_fde_ref fde;

      FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
	{
	  if (fde->ignored_debug)
	    continue;
	  if (!fde->in_std_section)
	    {
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
				   "Address" );
	      dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
				    fde->dw_fde_begin, "Length" );
	    }
	  if (fde->dw_fde_second_begin && !fde->second_in_std_section)
	    {
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
				   "Address" );
	      dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
				    fde->dw_fde_second_begin, "Length" );
	    }
	}
    }

  /* Output the terminator words.  */
  dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
  dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
}
11927 | |
11928 | /* Add a new entry to .debug_ranges. Return its index into |
11929 | ranges_table vector. */ |
11930 | |
11931 | static unsigned int |
11932 | add_ranges_num (int num, bool maybe_new_sec) |
11933 | { |
11934 | dw_ranges r = { NULL, .num: num, .idx: 0, .maybe_new_sec: maybe_new_sec, NULL, NULL }; |
11935 | vec_safe_push (v&: ranges_table, obj: r); |
11936 | return vec_safe_length (v: ranges_table) - 1; |
11937 | } |
11938 | |
11939 | /* Add a new entry to .debug_ranges corresponding to a block, or a |
11940 | range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if |
11941 | this entry might be in a different section from previous range. */ |
11942 | |
11943 | static unsigned int |
11944 | add_ranges (const_tree block, bool maybe_new_sec) |
11945 | { |
11946 | return add_ranges_num (num: block ? BLOCK_NUMBER (block) : 0, maybe_new_sec); |
11947 | } |
11948 | |
11949 | /* Note that (*rnglist_table)[offset] is either a head of a rnglist |
11950 | chain, or middle entry of a chain that will be directly referred to. */ |
11951 | |
11952 | static void |
11953 | note_rnglist_head (unsigned int offset) |
11954 | { |
11955 | if (dwarf_version < 5 || (*ranges_table)[offset].label) |
11956 | return; |
11957 | (*ranges_table)[offset].label = gen_internal_sym (prefix: "LLRL" ); |
11958 | } |
11959 | |
11960 | /* Add a new entry to .debug_ranges corresponding to a pair of labels. |
11961 | When using dwarf_split_debug_info, address attributes in dies destined |
11962 | for the final executable should be direct references--setting the |
11963 | parameter force_direct ensures this behavior. */ |
11964 | |
11965 | static void |
11966 | add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end, |
11967 | bool *added, bool force_direct) |
11968 | { |
11969 | unsigned int in_use = vec_safe_length (v: ranges_by_label); |
11970 | unsigned int offset; |
11971 | dw_ranges_by_label rbl = { .begin: begin, .end: end }; |
11972 | vec_safe_push (v&: ranges_by_label, obj: rbl); |
11973 | offset = add_ranges_num (num: -(int)in_use - 1, maybe_new_sec: true); |
11974 | if (!*added) |
11975 | { |
11976 | add_AT_range_list (die, attr_kind: DW_AT_ranges, offset, force_direct); |
11977 | *added = true; |
11978 | note_rnglist_head (offset); |
11979 | if (dwarf_split_debug_info && force_direct) |
11980 | (*ranges_table)[offset].idx = DW_RANGES_IDX_SKELETON; |
11981 | } |
11982 | } |
11983 | |
/* Emit .debug_ranges section (pre-DWARF5 range list format): a flat
   list of (begin, end) address pairs, with a (0, 0) pair terminating
   each list.  Addresses are CU-base-relative deltas when all code is in
   one text section, absolute otherwise.  */

static void
output_ranges (void)
{
  unsigned i;
  /* The first entry of each list gets an "Offset %#x" comment in the
     assembly; subsequent entries in the same list get none.  */
  static const char *const start_fmt = "Offset %#x" ;
  const char *fmt = start_fmt;
  dw_ranges *r;

  switch_to_section (debug_ranges_section);
  ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
  FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
    {
      int block_num = r->num;

      /* Positive block_num: a lexical block's begin/end labels.  */
      if (block_num > 0)
	{
	  char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
	  char elabel[MAX_ARTIFICIAL_LABEL_BYTES];

	  ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
	  ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);

	  /* If all code is in the text section, then the compilation
	     unit base address defaults to DW_AT_low_pc, which is the
	     base of the text section.  */
	  if (!have_multiple_function_sections)
	    {
	      dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
				    text_section_label,
				    fmt, i * 2 * DWARF2_ADDR_SIZE);
	      dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
				    text_section_label, NULL);
	    }

	  /* Otherwise, the compilation unit base address is zero,
	     which allows us to use absolute addresses, and not worry
	     about whether the target supports cross-section
	     arithmetic.  */
	  else
	    {
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
				   fmt, i * 2 * DWARF2_ADDR_SIZE);
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
	    }

	  fmt = NULL;
	}

      /* Negative block_num stands for an index into ranges_by_label.  */
      else if (block_num < 0)
	{
	  int lab_idx = - block_num - 1;

	  if (!have_multiple_function_sections)
	    {
	      /* Label-pair ranges are currently only generated when
		 there are multiple function sections.  */
	      gcc_unreachable ();
#if 0
	      /* If we ever use add_ranges_by_labels () for a single
		 function section, all we have to do is to take out
		 the #if 0 above.  */
	      dw2_asm_output_delta (DWARF2_ADDR_SIZE,
				    (*ranges_by_label)[lab_idx].begin,
				    text_section_label,
				    fmt, i * 2 * DWARF2_ADDR_SIZE);
	      dw2_asm_output_delta (DWARF2_ADDR_SIZE,
				    (*ranges_by_label)[lab_idx].end,
				    text_section_label, NULL);
#endif
	    }
	  else
	    {
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE,
				   (*ranges_by_label)[lab_idx].begin,
				   fmt, i * 2 * DWARF2_ADDR_SIZE);
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE,
				   (*ranges_by_label)[lab_idx].end,
				   NULL);
	    }
	}

      /* block_num == 0: terminator pair ending the current list.  */
      else
	{
	  dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
	  dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
	  fmt = start_fmt;
	}
    }
}
12073 | |
/* Non-zero if .debug_line_str should be used for .debug_line section
   strings or strings that are likely shareable with those.  Requires
   the target to support indirect strings and a mergeable .debug_str
   section; unavailable with -gsplit-dwarf (see FIXME below).  */
#define DWARF5_USE_DEBUG_LINE_STR \
  (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET		\
   && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 		\
   /* FIXME: there is no .debug_line_str.dwo section,		\
      for -gsplit-dwarf we should use DW_FORM_strx instead.  */	\
   && !dwarf_split_debug_info)
12082 | |
12083 | |
12084 | /* Returns TRUE if we are outputting DWARF5 and the assembler supports |
12085 | DWARF5 .debug_line tables using .debug_line_str or we generate |
12086 | it ourselves, except for split-dwarf which doesn't have a |
12087 | .debug_line_str. */ |
12088 | static bool |
12089 | asm_outputs_debug_line_str (void) |
12090 | { |
12091 | if (dwarf_version >= 5 |
12092 | && ! output_asm_line_debug_info () |
12093 | && DWARF5_USE_DEBUG_LINE_STR) |
12094 | return true; |
12095 | else |
12096 | { |
12097 | #if defined(HAVE_AS_GDWARF_5_DEBUG_FLAG) && defined(HAVE_AS_WORKING_DWARF_N_FLAG) |
12098 | return !dwarf_split_debug_info && dwarf_version >= 5; |
12099 | #else |
12100 | return false; |
12101 | #endif |
12102 | } |
12103 | } |
12104 | |
12105 | /* Return true if it is beneficial to use DW_RLE_base_address{,x}. |
12106 | I is index of the following range. */ |
12107 | |
12108 | static bool |
12109 | use_distinct_base_address_for_range (unsigned int i) |
12110 | { |
12111 | if (i >= vec_safe_length (v: ranges_table)) |
12112 | return false; |
12113 | |
12114 | dw_ranges *r2 = &(*ranges_table)[i]; |
12115 | /* Use DW_RLE_base_address{,x} if there is a next range in the |
12116 | range list and is guaranteed to be in the same section. */ |
12117 | return r2->num != 0 && r2->label == NULL && !r2->maybe_new_sec; |
12118 | } |
12119 | |
/* Assign .debug_rnglists indexes and unique indexes into the debug_addr
   section when needed.  This mirrors the emission logic in
   output_rnglists: an addr-table entry is created exactly when the
   emitted form (DW_RLE_*x) will need one, so the two functions must
   stay in sync.  */

static void
index_rnglists (void)
{
  unsigned i;
  dw_ranges *r;
  /* Tracks whether a DW_RLE_base_address{,x} is in effect, in which
     case following ranges use DW_RLE_offset_pair and need no entries.  */
  bool base = false;

  FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
    {
      /* Labeled (directly referenced) lists get a rnglist index, except
	 those reserved for the skeleton unit.  */
      if (r->label && r->idx != DW_RANGES_IDX_SKELETON)
	r->idx = rnglist_idx++;

      int block_num = r->num;
      /* Single-section case: offset pairs against the text label are
	 used (for positive block numbers, or always with LEB128
	 support), so no address-table entries are needed.  */
      if ((HAVE_AS_LEB128 || block_num < 0)
	  && !have_multiple_function_sections)
	continue;
      /* A new list head or possible section change invalidates any
	 currently active base address.  */
      if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
	base = false;
      if (block_num > 0)
	{
	  char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
	  char elabel[MAX_ARTIFICIAL_LABEL_BYTES];

	  ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
	  ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);

	  if (HAVE_AS_LEB128)
	    {
	      /* Establish a base address if the following range can
		 share it (see use_distinct_base_address_for_range).  */
	      if (!base && use_distinct_base_address_for_range (i: i + 1))
		{
		  r->begin_entry = add_addr_table_entry (addr: xstrdup (blabel),
							 kind: ate_kind_label);
		  base = true;
		}
	      if (base)
		/* If we have a base, no need for further
		   begin_entry/end_entry, as DW_RLE_offset_pair will be
		   used.  */
		continue;
	      r->begin_entry
		= add_addr_table_entry (addr: xstrdup (blabel), kind: ate_kind_label);
	      /* No need for end_entry, DW_RLE_start{,x}_length will use
		 length as opposed to a pair of addresses.  */
	    }
	  else
	    {
	      /* Without LEB128 support, DW_RLE_startx_endx needs both
		 a begin and an end address-table entry.  */
	      r->begin_entry
		= add_addr_table_entry (addr: xstrdup (blabel), kind: ate_kind_label);
	      r->end_entry
		= add_addr_table_entry (addr: xstrdup (elabel), kind: ate_kind_label);
	    }
	}

      /* Negative block_num stands for an index into ranges_by_label.  */
      else if (block_num < 0)
	{
	  int lab_idx = - block_num - 1;
	  const char *blabel = (*ranges_by_label)[lab_idx].begin;
	  const char *elabel = (*ranges_by_label)[lab_idx].end;

	  r->begin_entry
	    = add_addr_table_entry (addr: xstrdup (blabel), kind: ate_kind_label);
	  if (!HAVE_AS_LEB128)
	    r->end_entry
	      = add_addr_table_entry (addr: xstrdup (elabel), kind: ate_kind_label);
	}
    }
}
12191 | |
12192 | /* Emit .debug_rnglists or (when DWO is true) .debug_rnglists.dwo section. */ |
12193 | |
12194 | static bool |
12195 | output_rnglists (unsigned generation, bool dwo) |
12196 | { |
12197 | unsigned i; |
12198 | dw_ranges *r; |
12199 | char l1[MAX_ARTIFICIAL_LABEL_BYTES]; |
12200 | char l2[MAX_ARTIFICIAL_LABEL_BYTES]; |
12201 | char basebuf[MAX_ARTIFICIAL_LABEL_BYTES]; |
12202 | |
12203 | if (dwo) |
12204 | switch_to_section (debug_ranges_dwo_section); |
12205 | else |
12206 | { |
12207 | switch_to_section (debug_ranges_section); |
12208 | ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label); |
12209 | } |
12210 | /* There are up to 4 unique ranges labels per generation. |
12211 | See also init_sections_and_labels. */ |
12212 | ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL, |
12213 | 2 + 2 * dwo + generation * 6); |
12214 | ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL, |
12215 | 3 + 2 * dwo + generation * 6); |
12216 | if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4) |
12217 | dw2_asm_output_data (4, 0xffffffff, |
12218 | "Initial length escape value indicating " |
12219 | "64-bit DWARF extension" ); |
12220 | dw2_asm_output_delta (dwarf_offset_size, l2, l1, |
12221 | "Length of Range Lists" ); |
12222 | ASM_OUTPUT_LABEL (asm_out_file, l1); |
12223 | output_dwarf_version (); |
12224 | dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size" ); |
12225 | dw2_asm_output_data (1, 0, "Segment Size" ); |
12226 | /* Emit the offset table only for -gsplit-dwarf. If we don't care |
12227 | about relocation sizes and primarily care about the size of .debug* |
12228 | sections in linked shared libraries and executables, then |
12229 | the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes |
12230 | into it are usually larger than just DW_FORM_sec_offset offsets |
12231 | into the .debug_rnglists section. */ |
12232 | dw2_asm_output_data (4, dwo ? rnglist_idx : 0, |
12233 | "Offset Entry Count" ); |
12234 | if (dwo) |
12235 | { |
12236 | ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label); |
12237 | FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r) |
12238 | if (r->label && r->idx != DW_RANGES_IDX_SKELETON) |
12239 | dw2_asm_output_delta (dwarf_offset_size, r->label, |
12240 | ranges_base_label, NULL); |
12241 | } |
12242 | |
12243 | const char *lab = "" ; |
12244 | const char *base = NULL; |
12245 | bool skipping = false; |
12246 | bool ret = false; |
12247 | FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r) |
12248 | { |
12249 | int block_num = r->num; |
12250 | |
12251 | if (r->label) |
12252 | { |
12253 | if (dwarf_split_debug_info |
12254 | && (r->idx == DW_RANGES_IDX_SKELETON) == dwo) |
12255 | { |
12256 | ret = true; |
12257 | skipping = true; |
12258 | continue; |
12259 | } |
12260 | ASM_OUTPUT_LABEL (asm_out_file, r->label); |
12261 | lab = r->label; |
12262 | } |
12263 | if (skipping) |
12264 | { |
12265 | if (block_num == 0) |
12266 | skipping = false; |
12267 | continue; |
12268 | } |
12269 | if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec)) |
12270 | base = NULL; |
12271 | if (block_num > 0) |
12272 | { |
12273 | char blabel[MAX_ARTIFICIAL_LABEL_BYTES]; |
12274 | char elabel[MAX_ARTIFICIAL_LABEL_BYTES]; |
12275 | |
12276 | ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num); |
12277 | ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num); |
12278 | |
12279 | if (HAVE_AS_LEB128) |
12280 | { |
12281 | /* If all code is in the text section, then the compilation |
12282 | unit base address defaults to DW_AT_low_pc, which is the |
12283 | base of the text section. */ |
12284 | if (!have_multiple_function_sections) |
12285 | { |
12286 | dw2_asm_output_data (1, DW_RLE_offset_pair, |
12287 | "DW_RLE_offset_pair (%s)" , lab); |
12288 | dw2_asm_output_delta_uleb128 (blabel, text_section_label, |
12289 | "Range begin address (%s)" , lab); |
12290 | dw2_asm_output_delta_uleb128 (elabel, text_section_label, |
12291 | "Range end address (%s)" , lab); |
12292 | continue; |
12293 | } |
12294 | if (base == NULL && use_distinct_base_address_for_range (i: i + 1)) |
12295 | { |
12296 | if (dwarf_split_debug_info) |
12297 | { |
12298 | dw2_asm_output_data (1, DW_RLE_base_addressx, |
12299 | "DW_RLE_base_addressx (%s)" , lab); |
12300 | dw2_asm_output_data_uleb128 (r->begin_entry->index, |
12301 | "Base address index (%s)" , |
12302 | blabel); |
12303 | } |
12304 | else |
12305 | { |
12306 | dw2_asm_output_data (1, DW_RLE_base_address, |
12307 | "DW_RLE_base_address (%s)" , lab); |
12308 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel, |
12309 | "Base address (%s)" , lab); |
12310 | } |
12311 | strcpy (dest: basebuf, src: blabel); |
12312 | base = basebuf; |
12313 | } |
12314 | if (base) |
12315 | { |
12316 | dw2_asm_output_data (1, DW_RLE_offset_pair, |
12317 | "DW_RLE_offset_pair (%s)" , lab); |
12318 | dw2_asm_output_delta_uleb128 (blabel, base, |
12319 | "Range begin address (%s)" , lab); |
12320 | dw2_asm_output_delta_uleb128 (elabel, base, |
12321 | "Range end address (%s)" , lab); |
12322 | continue; |
12323 | } |
12324 | if (dwarf_split_debug_info) |
12325 | { |
12326 | dw2_asm_output_data (1, DW_RLE_startx_length, |
12327 | "DW_RLE_startx_length (%s)" , lab); |
12328 | dw2_asm_output_data_uleb128 (r->begin_entry->index, |
12329 | "Range begin address index " |
12330 | "(%s)" , blabel); |
12331 | } |
12332 | else |
12333 | { |
12334 | dw2_asm_output_data (1, DW_RLE_start_length, |
12335 | "DW_RLE_start_length (%s)" , lab); |
12336 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel, |
12337 | "Range begin address (%s)" , lab); |
12338 | } |
12339 | dw2_asm_output_delta_uleb128 (elabel, blabel, |
12340 | "Range length (%s)" , lab); |
12341 | } |
12342 | else if (dwarf_split_debug_info) |
12343 | { |
12344 | dw2_asm_output_data (1, DW_RLE_startx_endx, |
12345 | "DW_RLE_startx_endx (%s)" , lab); |
12346 | dw2_asm_output_data_uleb128 (r->begin_entry->index, |
12347 | "Range begin address index " |
12348 | "(%s)" , blabel); |
12349 | dw2_asm_output_data_uleb128 (r->end_entry->index, |
12350 | "Range end address index " |
12351 | "(%s)" , elabel); |
12352 | } |
12353 | else |
12354 | { |
12355 | dw2_asm_output_data (1, DW_RLE_start_end, |
12356 | "DW_RLE_start_end (%s)" , lab); |
12357 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel, |
12358 | "Range begin address (%s)" , lab); |
12359 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, |
12360 | "Range end address (%s)" , lab); |
12361 | } |
12362 | } |
12363 | |
12364 | /* Negative block_num stands for an index into ranges_by_label. */ |
12365 | else if (block_num < 0) |
12366 | { |
12367 | int lab_idx = - block_num - 1; |
12368 | const char *blabel = (*ranges_by_label)[lab_idx].begin; |
12369 | const char *elabel = (*ranges_by_label)[lab_idx].end; |
12370 | |
12371 | if (!have_multiple_function_sections) |
12372 | gcc_unreachable (); |
12373 | if (HAVE_AS_LEB128) |
12374 | { |
12375 | if (dwarf_split_debug_info) |
12376 | { |
12377 | dw2_asm_output_data (1, DW_RLE_startx_length, |
12378 | "DW_RLE_startx_length (%s)" , lab); |
12379 | dw2_asm_output_data_uleb128 (r->begin_entry->index, |
12380 | "Range begin address index " |
12381 | "(%s)" , blabel); |
12382 | } |
12383 | else |
12384 | { |
12385 | dw2_asm_output_data (1, DW_RLE_start_length, |
12386 | "DW_RLE_start_length (%s)" , lab); |
12387 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel, |
12388 | "Range begin address (%s)" , lab); |
12389 | } |
12390 | dw2_asm_output_delta_uleb128 (elabel, blabel, |
12391 | "Range length (%s)" , lab); |
12392 | } |
12393 | else if (dwarf_split_debug_info) |
12394 | { |
12395 | dw2_asm_output_data (1, DW_RLE_startx_endx, |
12396 | "DW_RLE_startx_endx (%s)" , lab); |
12397 | dw2_asm_output_data_uleb128 (r->begin_entry->index, |
12398 | "Range begin address index " |
12399 | "(%s)" , blabel); |
12400 | dw2_asm_output_data_uleb128 (r->end_entry->index, |
12401 | "Range end address index " |
12402 | "(%s)" , elabel); |
12403 | } |
12404 | else |
12405 | { |
12406 | dw2_asm_output_data (1, DW_RLE_start_end, |
12407 | "DW_RLE_start_end (%s)" , lab); |
12408 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel, |
12409 | "Range begin address (%s)" , lab); |
12410 | dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, |
12411 | "Range end address (%s)" , lab); |
12412 | } |
12413 | } |
12414 | else |
12415 | dw2_asm_output_data (1, DW_RLE_end_of_list, |
12416 | "DW_RLE_end_of_list (%s)" , lab); |
12417 | } |
12418 | ASM_OUTPUT_LABEL (asm_out_file, l2); |
12419 | return ret; |
12420 | } |
12421 | |
/* Data structure containing information about input files.  One entry
   is filled in per emitted file by file_name_acquire and later consumed
   by output_file_names.  */
struct file_info
{
  const char *path;		    /* Complete file name, with any leading
				       "./" components already stripped.  */
  const char *fname;		    /* File name part: points into PATH just
				       past the last directory separator, or
				       equals PATH when there is no
				       directory component.  */
  int length;			    /* Length of entire string (PATH).  */
  struct dwarf_file_data * file_idx; /* Index in input file table (the
					dwarf_file_data whose emitted_number
					identifies this file).  */
  int dir_idx;			    /* Index in directory table built by
				       output_file_names.  */
};
12431 | |
/* Data structure containing information about directories with source
   files.  Built by output_file_names from the sorted file_info array;
   PATH shares storage with the first file_info entry in the
   directory.  */
struct dir_info
{
  const char *path;		/* Path including directory name (prefix of
				   some file_info PATH, up to and including
				   the final separator).  */
  int length;			/* Path length (fname - path of the first
				   file in this directory).  */
  int prefix;			/* Index of directory entry which is a
				   prefix (the longest other directory that
				   prefixes this one), or -1 if none.  */
  int count;			/* Number of files in this directory.  */
  int dir_idx;			/* Index of directory used as base when the
				   emission heuristic decides to express
				   this directory relative to another.  */
};
12442 | |
12443 | /* Callback function for file_info comparison. We sort by looking at |
12444 | the directories in the path. */ |
12445 | |
12446 | static int |
12447 | file_info_cmp (const void *p1, const void *p2) |
12448 | { |
12449 | const struct file_info *const s1 = (const struct file_info *) p1; |
12450 | const struct file_info *const s2 = (const struct file_info *) p2; |
12451 | const unsigned char *cp1; |
12452 | const unsigned char *cp2; |
12453 | |
12454 | /* Take care of file names without directories. We need to make sure that |
12455 | we return consistent values to qsort since some will get confused if |
12456 | we return the same value when identical operands are passed in opposite |
12457 | orders. So if neither has a directory, return 0 and otherwise return |
12458 | 1 or -1 depending on which one has the directory. We want the one with |
12459 | the directory to sort after the one without, so all no directory files |
12460 | are at the start (normally only the compilation unit file). */ |
12461 | if ((s1->path == s1->fname || s2->path == s2->fname)) |
12462 | return (s2->path == s2->fname) - (s1->path == s1->fname); |
12463 | |
12464 | cp1 = (const unsigned char *) s1->path; |
12465 | cp2 = (const unsigned char *) s2->path; |
12466 | |
12467 | while (1) |
12468 | { |
12469 | ++cp1; |
12470 | ++cp2; |
12471 | /* Reached the end of the first path? If so, handle like above, |
12472 | but now we want longer directory prefixes before shorter ones. */ |
12473 | if ((cp1 == (const unsigned char *) s1->fname) |
12474 | || (cp2 == (const unsigned char *) s2->fname)) |
12475 | return ((cp1 == (const unsigned char *) s1->fname) |
12476 | - (cp2 == (const unsigned char *) s2->fname)); |
12477 | |
12478 | /* Character of current path component the same? */ |
12479 | else if (*cp1 != *cp2) |
12480 | return *cp1 - *cp2; |
12481 | } |
12482 | } |
12483 | |
/* State threaded through file_name_acquire while traversing the file
   table: the destination array plus fill/capacity bookkeeping.  */
struct file_name_acquire_data
{
  struct file_info *files;	/* Array being filled, one slot per
				   emitted file.  */
  int used_files;		/* Number of slots filled so far.  */
  int max_files;		/* Capacity of FILES.  */
};
12490 | |
12491 | /* Traversal function for the hash table. */ |
12492 | |
12493 | int |
12494 | file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad) |
12495 | { |
12496 | struct dwarf_file_data *d = *slot; |
12497 | struct file_info *fi; |
12498 | const char *f; |
12499 | |
12500 | gcc_assert (fnad->max_files >= d->emitted_number); |
12501 | |
12502 | if (! d->emitted_number) |
12503 | return 1; |
12504 | |
12505 | gcc_assert (fnad->max_files != fnad->used_files); |
12506 | |
12507 | fi = fnad->files + fnad->used_files++; |
12508 | |
12509 | f = d->filename; |
12510 | |
12511 | /* Skip all leading "./". */ |
12512 | while (f[0] == '.' && IS_DIR_SEPARATOR (f[1])) |
12513 | f += 2; |
12514 | |
12515 | /* Create a new array entry. */ |
12516 | fi->path = f; |
12517 | fi->length = strlen (s: f); |
12518 | fi->file_idx = d; |
12519 | |
12520 | /* Search for the file name part. */ |
12521 | f = strrchr (s: f, DIR_SEPARATOR); |
12522 | #if defined (DIR_SEPARATOR_2) |
12523 | { |
12524 | const char *g = strrchr (fi->path, DIR_SEPARATOR_2); |
12525 | |
12526 | if (g != NULL) |
12527 | { |
12528 | if (f == NULL || f < g) |
12529 | f = g; |
12530 | } |
12531 | } |
12532 | #endif |
12533 | |
12534 | fi->fname = f == NULL ? fi->path : f + 1; |
12535 | return 1; |
12536 | } |
12537 | |
12538 | /* Helper function for output_file_names. Emit a FORM encoded |
12539 | string STR, with assembly comment start ENTRY_KIND and |
12540 | index IDX */ |
12541 | |
12542 | static void |
12543 | output_line_string (enum dwarf_form form, const char *str, |
12544 | const char *entry_kind, unsigned int idx) |
12545 | { |
12546 | switch (form) |
12547 | { |
12548 | case DW_FORM_string: |
12549 | dw2_asm_output_nstring (str, -1, "%s: %#x" , entry_kind, idx); |
12550 | break; |
12551 | case DW_FORM_line_strp: |
12552 | if (!debug_line_str_hash) |
12553 | debug_line_str_hash |
12554 | = hash_table<indirect_string_hasher>::create_ggc (n: 10); |
12555 | |
12556 | struct indirect_string_node *node; |
12557 | node = find_AT_string_in_table (str, table: debug_line_str_hash); |
12558 | set_indirect_string (node); |
12559 | node->form = form; |
12560 | dw2_asm_output_offset (dwarf_offset_size, node->label, |
12561 | debug_line_str_section, "%s: %#x: \"%s\"" , |
12562 | entry_kind, 0, node->str); |
12563 | break; |
12564 | default: |
12565 | gcc_unreachable (); |
12566 | } |
12567 | } |
12568 | |
/* Output the directory table and the file name table.  We try to minimize
   the total amount of memory needed.  A heuristic is used to avoid large
   slowdowns with many input files.

   The work proceeds in phases: (1) collect and sort all emitted files,
   (2) build the directory table, (3) run a cost heuristic choosing which
   directories are worth emitting as bases, (4) emit the directory table,
   (5) emit the file table in emitted_number order.  DWARF 5 headers get
   explicit entry-format descriptors; earlier versions use the classic
   NUL-terminated tables.  */

static void
output_file_names (void)
{
  struct file_name_acquire_data fnad;
  int numfiles;
  struct file_info *files;
  struct dir_info *dirs;
  int *saved;
  int *savehere;
  int *backmap;
  int ndirs;
  int idx_offset;
  int i;

  /* No file was ever assigned an emitted number: emit the minimal
     tables.  DWARF 5 still requires one directory entry (the
     compilation directory) and one file entry (the primary source
     file); earlier versions just need the two terminating zeros.  */
  if (!last_emitted_file)
    {
      if (dwarf_version >= 5)
	{
	  const char *comp_dir = comp_dir_string ();
	  if (comp_dir == NULL)
	    comp_dir = "" ;
	  dw2_asm_output_data (1, 1, "Directory entry format count" );
	  enum dwarf_form str_form = DW_FORM_string;
	  if (DWARF5_USE_DEBUG_LINE_STR)
	    str_form = DW_FORM_line_strp;
	  dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path" );
	  dw2_asm_output_data_uleb128 (str_form, "%s" ,
				       get_DW_FORM_name (form: str_form));
	  dw2_asm_output_data_uleb128 (1, "Directories count" );
	  if (str_form == DW_FORM_string)
	    dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x" , 0);
	  else
	    output_line_string (form: str_form, str: comp_dir, entry_kind: "Directory Entry" , idx: 0);
	  const char *filename0 = get_AT_string (die: comp_unit_die (), attr_kind: DW_AT_name);
	  if (filename0 == NULL)
	    filename0 = "" ;
	  /* VMS adds timestamp and size columns to the file entry
	     format, hence the larger format count.  */
#ifdef VMS_DEBUGGING_INFO
	  dw2_asm_output_data (1, 4, "File name entry format count" );
#else
	  dw2_asm_output_data (1, 2, "File name entry format count" );
#endif
	  dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path" );
	  dw2_asm_output_data_uleb128 (str_form, "%s" ,
				       get_DW_FORM_name (form: str_form));
	  dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
				       "DW_LNCT_directory_index" );
	  dw2_asm_output_data_uleb128 (DW_FORM_data1, "%s" ,
				       get_DW_FORM_name (form: DW_FORM_data1));
#ifdef VMS_DEBUGGING_INFO
	  dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp" );
	  dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata" );
	  dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size" );
	  dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata" );
#endif
	  dw2_asm_output_data_uleb128 (1, "File names count" );

	  output_line_string (form: str_form, str: filename0, entry_kind: "File Entry" , idx: 0);
	  dw2_asm_output_data (1, 0, NULL);
#ifdef VMS_DEBUGGING_INFO
	  dw2_asm_output_data_uleb128 (0, NULL);
	  dw2_asm_output_data_uleb128 (0, NULL);
#endif
	}
      else
	{
	  dw2_asm_output_data (1, 0, "End directory table" );
	  dw2_asm_output_data (1, 0, "End file name table" );
	}
      return;
    }

  numfiles = last_emitted_file->emitted_number;

  /* Allocate the various arrays we need.  */
  files = XALLOCAVEC (struct file_info, numfiles);
  dirs = XALLOCAVEC (struct dir_info, numfiles);

  /* Phase 1: collect every emitted file into FILES, then sort so that
     files sharing a directory become adjacent.  */
  fnad.files = files;
  fnad.used_files = 0;
  fnad.max_files = numfiles;
  file_table->traverse<file_name_acquire_data *, file_name_acquire> (argument: &fnad);
  gcc_assert (fnad.used_files == fnad.max_files);

  qsort (files, numfiles, sizeof (files[0]), file_info_cmp);

  /* Phase 2: find all the different directories used.  After the sort,
     files in the same directory are adjacent, so it suffices to compare
     each file's directory prefix against the most recent entry.  */
  dirs[0].path = files[0].path;
  dirs[0].length = files[0].fname - files[0].path;
  dirs[0].prefix = -1;
  dirs[0].count = 1;
  dirs[0].dir_idx = 0;
  files[0].dir_idx = 0;
  ndirs = 1;

  for (i = 1; i < numfiles; i++)
    if (files[i].fname - files[i].path == dirs[ndirs - 1].length
	&& memcmp (s1: dirs[ndirs - 1].path, s2: files[i].path,
		   n: dirs[ndirs - 1].length) == 0)
      {
	/* Same directory as last entry.  */
	files[i].dir_idx = ndirs - 1;
	++dirs[ndirs - 1].count;
      }
    else
      {
	int j;

	/* This is a new directory.  */
	dirs[ndirs].path = files[i].path;
	dirs[ndirs].length = files[i].fname - files[i].path;
	dirs[ndirs].count = 1;
	dirs[ndirs].dir_idx = ndirs;
	files[i].dir_idx = ndirs;

	/* Search for a prefix: the longest earlier directory whose
	   path is a proper prefix of this one (skipping the trivial
	   one-character root).  */
	dirs[ndirs].prefix = -1;
	for (j = 0; j < ndirs; j++)
	  if (dirs[j].length < dirs[ndirs].length
	      && dirs[j].length > 1
	      && (dirs[ndirs].prefix == -1
		  || dirs[j].length > dirs[dirs[ndirs].prefix].length)
	      && memcmp (s1: dirs[j].path, s2: dirs[ndirs].path, n: dirs[j].length) == 0)
	    dirs[ndirs].prefix = j;

	++ndirs;
      }

  /* Phase 3 -- now to the actual work.  We have to find a subset of the
     directories which allow expressing the file name using references to
     the directory table with the least amount of characters.  We do not
     do an exhaustive search where we would have to check out every
     combination of every single possible prefix.  Instead we use a
     heuristic which provides nearly optimal results in most cases and
     never is much off.  */
  saved = XALLOCAVEC (int, ndirs);
  savehere = XALLOCAVEC (int, ndirs);

  memset (s: saved, c: '\0', n: ndirs * sizeof (saved[0]));
  for (i = 0; i < ndirs; i++)
    {
      int j;
      int total;

      /* We can always save some space for the current directory.  But this
	 does not mean it will be enough to justify adding the directory.  */
      savehere[i] = dirs[i].length;
      total = (savehere[i] - saved[i]) * dirs[i].count;

      for (j = i + 1; j < ndirs; j++)
	{
	  savehere[j] = 0;
	  if (saved[j] < dirs[i].length)
	    {
	      /* Determine whether the dirs[i] path is a prefix of the
		 dirs[j] path.  Walk the prefix chain looking for i.  */
	      int k;

	      k = dirs[j].prefix;
	      while (k != -1 && k != (int) i)
		k = dirs[k].prefix;

	      if (k == (int) i)
		{
		  /* Yes it is.  We can possibly save some memory by
		     writing the filenames in dirs[j] relative to
		     dirs[i].  */
		  savehere[j] = dirs[i].length;
		  total += (savehere[j] - saved[j]) * dirs[j].count;
		}
	    }
	}

      /* Check whether we can save enough to justify adding the dirs[i]
	 directory.  */
      if (total > dirs[i].length + 1)
	{
	  /* It's worthwhile adding.  */
	  for (j = i; j < ndirs; j++)
	    if (savehere[j] > 0)
	      {
		/* Remember how much we saved for this directory so far.  */
		saved[j] = savehere[j];

		/* Remember the prefix directory.  */
		dirs[j].dir_idx = i;
	      }
	}
    }

  /* Phase 4: emit the directory name table.  IDX_OFFSET is 1 when
     directory slot 0 is occupied by a real directory (so emitted
     indexes must be shifted by one past the comp_dir entry).  */
  idx_offset = dirs[0].length > 0 ? 1 : 0;
  enum dwarf_form str_form = DW_FORM_string;
  enum dwarf_form idx_form = DW_FORM_udata;
  if (dwarf_version >= 5)
    {
      const char *comp_dir = comp_dir_string ();
      if (comp_dir == NULL)
	comp_dir = "" ;
      dw2_asm_output_data (1, 1, "Directory entry format count" );
      if (DWARF5_USE_DEBUG_LINE_STR)
	str_form = DW_FORM_line_strp;
      dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path" );
      dw2_asm_output_data_uleb128 (str_form, "%s" ,
				   get_DW_FORM_name (form: str_form));
      dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count" );
      if (str_form == DW_FORM_string)
	{
	  dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x" , 0);
	  for (i = 1 - idx_offset; i < ndirs; i++)
	    dw2_asm_output_nstring (dirs[i].path,
				    dirs[i].length
				    - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
				    "Directory Entry: %#x" , i + idx_offset);
	}
      else
	{
	  output_line_string (form: str_form, str: comp_dir, entry_kind: "Directory Entry" , idx: 0);
	  for (i = 1 - idx_offset; i < ndirs; i++)
	    {
	      const char *str
		= ggc_alloc_string (contents: dirs[i].path,
				    length: dirs[i].length
				    - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
	      output_line_string (form: str_form, str, entry_kind: "Directory Entry" ,
				  idx: (unsigned) i + idx_offset);
	    }
	}
    }
  else
    {
      for (i = 1 - idx_offset; i < ndirs; i++)
	dw2_asm_output_nstring (dirs[i].path,
				dirs[i].length
				- !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
				"Directory Entry: %#x" , i + idx_offset);

      dw2_asm_output_data (1, 0, "End directory table" );
    }

  /* We have to emit them in the order of emitted_number since that's
     used in the debug info generation.  To do this efficiently we
     generate a back-mapping of the indices first.  */
  backmap = XALLOCAVEC (int, numfiles);
  for (i = 0; i < numfiles; i++)
    backmap[files[i].file_idx->emitted_number - 1] = i;

  if (dwarf_version >= 5)
    {
      const char *filename0 = get_AT_string (die: comp_unit_die (), attr_kind: DW_AT_name);
      if (filename0 == NULL)
	filename0 = "" ;
      /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
	 DW_FORM_data2.  Choose one based on the number of directories
	 and how much space would they occupy in each encoding.
	 If we have at most 256 directories, all indexes fit into
	 a single byte, so DW_FORM_data1 is most compact (if there
	 are at most 128 directories, DW_FORM_udata would be as
	 compact as that, but not shorter and slower to decode).  */
      if (ndirs + idx_offset <= 256)
	idx_form = DW_FORM_data1;
      /* If there are more than 65536 directories, we have to use
	 DW_FORM_udata, DW_FORM_data2 can't refer to them.
	 Otherwise, compute what space would occupy if all the indexes
	 used DW_FORM_udata - sum - and compare that to how large would
	 be DW_FORM_data2 encoding, and pick the more efficient one.  */
      else if (ndirs + idx_offset <= 65536)
	{
	  unsigned HOST_WIDE_INT sum = 1;
	  for (i = 0; i < numfiles; i++)
	    {
	      int file_idx = backmap[i];
	      int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
	      sum += size_of_uleb128 (dir_idx);
	    }
	  if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
	    idx_form = DW_FORM_data2;
	}
#ifdef VMS_DEBUGGING_INFO
      dw2_asm_output_data (1, 4, "File name entry format count" );
#else
      dw2_asm_output_data (1, 2, "File name entry format count" );
#endif
      dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path" );
      dw2_asm_output_data_uleb128 (str_form, "%s" ,
				   get_DW_FORM_name (form: str_form));
      dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
				   "DW_LNCT_directory_index" );
      dw2_asm_output_data_uleb128 (idx_form, "%s" ,
				   get_DW_FORM_name (form: idx_form));
#ifdef VMS_DEBUGGING_INFO
      dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp" );
      dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata" );
      dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size" );
      dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata" );
#endif
      dw2_asm_output_data_uleb128 (numfiles + 1, "File names count" );

      /* File entry 0 is the primary source file of the compilation
	 unit (DWARF 5 requirement).  */
      output_line_string (form: str_form, str: filename0, entry_kind: "File Entry" , idx: 0);

      /* Include directory index.  */
      if (idx_form != DW_FORM_udata)
	dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
			     0, NULL);
      else
	dw2_asm_output_data_uleb128 (0, NULL);

#ifdef VMS_DEBUGGING_INFO
      dw2_asm_output_data_uleb128 (0, NULL);
      dw2_asm_output_data_uleb128 (0, NULL);
#endif
    }

  /* Phase 5: now write all the file names, in emitted_number order via
     BACKMAP, each relative to its chosen base directory.  */
  for (i = 0; i < numfiles; i++)
    {
      int file_idx = backmap[i];
      int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;

#ifdef VMS_DEBUGGING_INFO
#define MAX_VMS_VERSION_LEN 6 /* ";32768" */

      /* Setting these fields can lead to debugger miscomparisons,
	 but VMS Debug requires them to be set correctly.  */

      int ver;
      long long cdt;
      long siz;
      int maxfilelen = (strlen (files[file_idx].path)
			+ dirs[dir_idx].length
			+ MAX_VMS_VERSION_LEN + 1);
      char *filebuf = XALLOCAVEC (char, maxfilelen);

      vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
      snprintf (filebuf, maxfilelen, "%s;%d" ,
		files[file_idx].path + dirs[dir_idx].length, ver);

      output_line_string (str_form, filebuf, "File Entry" , (unsigned) i + 1);

      /* Include directory index.  */
      if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
	dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
			     dir_idx + idx_offset, NULL);
      else
	dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);

      /* Modification time.  */
      dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
							 &cdt, 0, 0, 0) == 0)
				   ? cdt : 0, NULL);

      /* File length in bytes.  */
      dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
							 0, &siz, 0, 0) == 0)
				   ? siz : 0, NULL);
#else
      output_line_string (form: str_form,
			  str: files[file_idx].path + dirs[dir_idx].length,
			  entry_kind: "File Entry" , idx: (unsigned) i + 1);

      /* Include directory index.  */
      if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
	dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
			     dir_idx + idx_offset, NULL);
      else
	dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);

      /* DWARF 5 has no timestamp/size columns in this configuration.  */
      if (dwarf_version >= 5)
	continue;

      /* Modification time.  */
      dw2_asm_output_data_uleb128 (0, NULL);

      /* File length in bytes.  */
      dw2_asm_output_data_uleb128 (0, NULL);
#endif /* VMS_DEBUGGING_INFO */
    }

  if (dwarf_version < 5)
    dw2_asm_output_data (1, 0, "End file name table" );
}
12952 | |
12953 | |
/* Output one line number table into the .debug_line section.  Walks
   TABLE's entries and emits the corresponding standard, extended and
   special line-number-program opcodes, then closes the program with
   DW_LNE_end_sequence at TABLE's end label.  */

static void
output_one_line_info_table (dw_line_info_table *table)
{
  char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
  /* DWARF line-number state machine registers we track locally.  */
  unsigned int current_line = 1;
  bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
  dw_line_info_entry *ent, *prev_addr = NULL;
  size_t i;
  unsigned int view;

  view = 0;

  FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
    {
      switch (ent->opcode)
	{
	case LI_set_address:
	  /* ??? Unfortunately, we have little choice here currently, and
	     must always use the most general form.  GCC does not know the
	     address delta itself, so we can't use DW_LNS_advance_pc.  Many
	     ports do have length attributes which will give an upper bound
	     on the address range.  We could perhaps use length attributes
	     to determine when it is safe to use DW_LNS_fixed_advance_pc.  */
	  ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);

	  view = 0;

	  /* This can handle any delta.  This takes
	     4+DWARF2_ADDR_SIZE bytes.  */
	  dw2_asm_output_data (1, 0, "set address %s%s" , line_label,
			       debug_variable_location_views
			       ? ", reset view to 0" : "" );
	  dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
	  dw2_asm_output_data (1, DW_LNE_set_address, NULL);
	  dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);

	  prev_addr = ent;
	  break;

	case LI_adv_address:
	  {
	    /* NOTE(review): relies on a preceding LI_set_address having
	       set PREV_ADDR non-NULL — presumably guaranteed by the
	       table builder; confirm against the producer of
	       table->entries.  */
	    ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
	    char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
	    ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);

	    view++;

	    /* Fixed advance emits a 2-byte label delta and bumps the
	       view number within the same address sequence.  */
	    dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i" , view);
	    dw2_asm_output_delta (2, line_label, prev_label,
				  "from %s to %s" , prev_label, line_label);

	    prev_addr = ent;
	    break;
	  }

	case LI_set_line:
	  if (ent->val == current_line)
	    {
	      /* We still need to start a new row, so output a copy insn.  */
	      dw2_asm_output_data (1, DW_LNS_copy,
				   "copy line %u" , current_line);
	    }
	  else
	    {
	      int line_offset = ent->val - current_line;
	      int line_delta = line_offset - DWARF_LINE_BASE;

	      current_line = ent->val;
	      if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
		{
		  /* This can handle deltas from -10 to 234, using the current
		     definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
		     This takes 1 byte.  */
		  dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
				       "line %u" , current_line);
		}
	      else
		{
		  /* This can handle any delta.  This takes at least 4 bytes,
		     depending on the value being encoded.  */
		  dw2_asm_output_data (1, DW_LNS_advance_line,
				       "advance to line %u" , current_line);
		  dw2_asm_output_data_sleb128 (line_offset, NULL);
		  dw2_asm_output_data (1, DW_LNS_copy, NULL);
		}
	    }
	  break;

	case LI_set_file:
	  dw2_asm_output_data (1, DW_LNS_set_file, "set file %u" , ent->val);
	  dw2_asm_output_data_uleb128 (ent->val, "%u" , ent->val);
	  break;

	case LI_set_column:
	  dw2_asm_output_data (1, DW_LNS_set_column, "column %u" , ent->val);
	  dw2_asm_output_data_uleb128 (ent->val, "%u" , ent->val);
	  break;

	case LI_negate_stmt:
	  /* Toggle, then report the new value in the asm comment.  */
	  current_is_stmt = !current_is_stmt;
	  dw2_asm_output_data (1, DW_LNS_negate_stmt,
			       "is_stmt %d" , current_is_stmt);
	  break;

	case LI_set_prologue_end:
	  dw2_asm_output_data (1, DW_LNS_set_prologue_end,
			       "set prologue end" );
	  break;

	case LI_set_epilogue_begin:
	  dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
			       "set epilogue begin" );
	  break;

	case LI_set_discriminator:
	  /* Extended opcode: length byte covers the opcode byte plus
	     the ULEB128-encoded discriminator.  */
	  dw2_asm_output_data (1, 0, "discriminator %u" , ent->val);
	  dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
	  dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
	  dw2_asm_output_data_uleb128 (ent->val, NULL);
	  break;
	}
    }

  /* Emit debug info for the address of the end of the table.  */
  dw2_asm_output_data (1, 0, "set address %s" , table->end_label);
  dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
  dw2_asm_output_data (1, DW_LNE_set_address, NULL);
  dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);

  /* Terminate the line number program for this table.  */
  dw2_asm_output_data (1, 0, "end sequence" );
  dw2_asm_output_data_uleb128 (1, NULL);
  dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
}
13089 | |
/* Counter incremented by output_line_info; used to generate unique
   internal label names for each .debug_line header emitted.  */
static unsigned int output_line_info_generation;
13091 | |
13092 | /* Output the source line number correspondence information. This |
13093 | information goes into the .debug_line section. */ |
13094 | |
static void
output_line_info (bool prologue_only)
{
  /* l1/l2 bracket the entire .debug_line contribution; p1/p2 bracket the
     header ("prologue").  The label deltas supply the unit-length and
     header-length fields without needing to know the sizes up front.  */
  char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
  char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
  /* Whether at least one line table has been emitted; see the Darwin
     linker workaround at the bottom.  */
  bool saw_one = false;
  int opc;

  /* Generate a fresh set of labels for this invocation; the generation
     counter is bumped on the last use so all four share one value.  */
  ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL,
			       output_line_info_generation);
  ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL,
			       output_line_info_generation);
  ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL,
			       output_line_info_generation);
  ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL,
			       output_line_info_generation++);

  /* XCOFF debug sections carry their own length bookkeeping, so the
     initial-length field is emitted only for non-XCOFF targets.  */
  if (!XCOFF_DEBUGGING_INFO)
    {
      if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
	dw2_asm_output_data (4, 0xffffffff,
	  "Initial length escape value indicating 64-bit DWARF extension" );
      dw2_asm_output_delta (dwarf_offset_size, l2, l1,
			    "Length of Source Line Info" );
    }

  ASM_OUTPUT_LABEL (asm_out_file, l1);

  output_dwarf_version ();
  /* DWARF 5 added explicit address_size and segment_selector_size fields
     to the line table header.  */
  if (dwarf_version >= 5)
    {
      dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size" );
      dw2_asm_output_data (1, 0, "Segment Size" );
    }
  dw2_asm_output_delta (dwarf_offset_size, p2, p1, "Prolog Length" );
  ASM_OUTPUT_LABEL (asm_out_file, p1);

  /* Define the architecture-dependent minimum instruction length (in bytes).
     In this implementation of DWARF, this field is used for information
     purposes only.  Since GCC generates assembly language, we have no
     a priori knowledge of how many instruction bytes are generated for each
     source line, and therefore can use only the DW_LNE_set_address and
     DW_LNS_fixed_advance_pc line information commands.  Accordingly, we fix
     this as '1', which is "correct enough" for all architectures,
     and don't let the target override.  */
  dw2_asm_output_data (1, 1, "Minimum Instruction Length" );

  /* max_ops_per_instruction first appeared in DWARF 4.  */
  if (dwarf_version >= 4)
    dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
			 "Maximum Operations Per Instruction" );
  dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
		       "Default is_stmt_start flag" );
  dw2_asm_output_data (1, DWARF_LINE_BASE,
		       "Line Base Value (Special Opcodes)" );
  dw2_asm_output_data (1, DWARF_LINE_RANGE,
		       "Line Range Value (Special Opcodes)" );
  dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
		       "Special Opcode Base" );

  /* Emit the standard_opcode_lengths array: one operand count per
     standard opcode below DWARF_LINE_OPCODE_BASE.  */
  for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
    {
      int n_op_args;
      switch (opc)
	{
	case DW_LNS_advance_pc:
	case DW_LNS_advance_line:
	case DW_LNS_set_file:
	case DW_LNS_set_column:
	case DW_LNS_fixed_advance_pc:
	case DW_LNS_set_isa:
	  n_op_args = 1;
	  break;
	default:
	  n_op_args = 0;
	  break;
	}

      dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args" ,
			   opc, n_op_args);
    }

  /* Write out the information about the files we use.  */
  output_file_names ();
  ASM_OUTPUT_LABEL (asm_out_file, p2);
  if (prologue_only)
    {
      /* Output the marker for the end of the line number info.  */
      ASM_OUTPUT_LABEL (asm_out_file, l2);
      return;
    }

  /* Emit each in-use per-section line table collected during compilation.  */
  if (separate_line_info)
    {
      dw_line_info_table *table;
      size_t i;

      FOR_EACH_VEC_ELT (*separate_line_info, i, table)
	if (table->in_use)
	  {
	    output_one_line_info_table (table);
	    saw_one = true;
	  }
    }
  if (cold_text_section_line_info && cold_text_section_line_info->in_use)
    {
      output_one_line_info_table (table: cold_text_section_line_info);
      saw_one = true;
    }

  /* ??? Some Darwin linkers crash on a .debug_line section with no
     sequences.  Further, merely a DW_LNE_end_sequence entry is not
     sufficient -- the address column must also be initialized.
     Make sure to output at least one set_address/end_sequence pair,
     choosing .text since that section is always present.  */
  if (text_section_line_info->in_use || !saw_one)
    output_one_line_info_table (table: text_section_line_info);

  /* Output the marker for the end of the line number info.  */
  ASM_OUTPUT_LABEL (asm_out_file, l2);
}
13215 | |
13216 | /* Return true if DW_AT_endianity should be emitted according to REVERSE. */ |
13217 | |
13218 | static inline bool |
13219 | need_endianity_attribute_p (bool reverse) |
13220 | { |
13221 | return reverse && (dwarf_version >= 3 || !dwarf_strict); |
13222 | } |
13223 | |
13224 | /* Given a pointer to a tree node for some base type, return a pointer to |
13225 | a DIE that describes the given type. REVERSE is true if the type is |
13226 | to be interpreted in the reverse storage order wrt the target order. |
13227 | |
13228 | This routine must only be called for GCC type nodes that correspond to |
13229 | Dwarf base (fundamental) types. */ |
13230 | |
dw_die_ref
base_type_die (tree type, bool reverse)
{
  dw_die_ref base_type_result;
  enum dwarf_type encoding;
  /* Set when the language hook recognized TYPE as a fixed-point type;
     FPT_INFO then carries its scale factor.  */
  bool fpt_used = false;
  struct fixed_point_type_info fpt_info;
  /* Non-null when the language defines a bias for TYPE (e.g. Ada biased
     representation); emitted as DW_AT_GNU_bias below.  */
  tree type_bias = NULL_TREE;

  /* If this is a subtype that should not be emitted as a subrange type,
     use the base type.  See subrange_type_for_debug_p.  */
  if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
    type = TREE_TYPE (type);

  /* Map the tree type code to the DWARF DW_ATE_* encoding.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
      /* char8_t/char16_t/char32_t get DW_ATE_UTF (a DWARF 4 addition),
	 recognized by the built-in TYPE_DECL name.  */
      if ((dwarf_version >= 4 || !dwarf_strict)
	  && TYPE_NAME (type)
	  && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	  && DECL_IS_UNDECLARED_BUILTIN (TYPE_NAME (type))
	  && DECL_NAME (TYPE_NAME (type)))
	{
	  const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
	  if (strcmp (s1: name, s2: "char16_t" ) == 0
	      || strcmp (s1: name, s2: "char8_t" ) == 0
	      || strcmp (s1: name, s2: "char32_t" ) == 0)
	    {
	      encoding = DW_ATE_UTF;
	      break;
	    }
	}
      /* Languages such as Ada represent fixed-point types as integers;
	 ask the front end whether TYPE is really one of those.  */
      if ((dwarf_version >= 3 || !dwarf_strict)
	  && lang_hooks.types.get_fixed_point_type_info)
	{
	  memset (s: &fpt_info, c: 0, n: sizeof (fpt_info));
	  if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
	    {
	      fpt_used = true;
	      encoding = ((TYPE_UNSIGNED (type))
			  ? DW_ATE_unsigned_fixed
			  : DW_ATE_signed_fixed);
	      break;
	    }
	}
      /* Character types (TYPE_STRING_FLAG); Rust's 4-byte char is UTF.  */
      if (TYPE_STRING_FLAG (type))
	{
	  if ((dwarf_version >= 4 || !dwarf_strict)
	      && is_rust ()
	      && int_size_in_bytes (type) == 4)
	    encoding = DW_ATE_UTF;
	  else if (TYPE_UNSIGNED (type))
	    encoding = DW_ATE_unsigned_char;
	  else
	    encoding = DW_ATE_signed_char;
	}
      else if (TYPE_UNSIGNED (type))
	encoding = DW_ATE_unsigned;
      else
	encoding = DW_ATE_signed;

      /* A biased integer type (GNU extension, so not in strict mode).  */
      if (!dwarf_strict
	  && lang_hooks.types.get_type_bias)
	type_bias = lang_hooks.types.get_type_bias (type);
      break;

    case REAL_TYPE:
      /* DW_ATE_decimal_float is DWARF 3; fall back to the lo_user range
	 for strict DWARF 2.  */
      if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
	{
	  if (dwarf_version >= 3 || !dwarf_strict)
	    encoding = DW_ATE_decimal_float;
	  else
	    encoding = DW_ATE_lo_user;
	}
      else
	encoding = DW_ATE_float;
      break;

    case FIXED_POINT_TYPE:
      /* DW_ATE_{un,}signed_fixed are DWARF 3 additions.  */
      if (!(dwarf_version >= 3 || !dwarf_strict))
	encoding = DW_ATE_lo_user;
      else if (TYPE_UNSIGNED (type))
	encoding = DW_ATE_unsigned_fixed;
      else
	encoding = DW_ATE_signed_fixed;
      break;

      /* Dwarf2 doesn't know anything about complex ints, so use
	 a user defined type for it.  */
    case COMPLEX_TYPE:
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (type)))
	encoding = DW_ATE_complex_float;
      else
	encoding = DW_ATE_lo_user;
      break;

    case BOOLEAN_TYPE:
      /* GNU FORTRAN/Ada/C++ BOOLEAN type.  */
      encoding = DW_ATE_boolean;
      break;

    case BITINT_TYPE:
      /* C23 _BitInt(N).  */
      if (TYPE_UNSIGNED (type))
	encoding = DW_ATE_unsigned;
      else
	encoding = DW_ATE_signed;
      break;

    default:
      /* No other TREE_CODEs are Dwarf fundamental types.  */
      gcc_unreachable ();
    }

  /* Build the DW_TAG_base_type DIE; the caller is responsible for
     parenting it (see modified_type_die).  */
  base_type_result = new_die_raw (tag_value: DW_TAG_base_type);

  add_AT_unsigned (die: base_type_result, attr_kind: DW_AT_byte_size,
		   unsigned_val: int_size_in_bytes (type));
  add_AT_unsigned (die: base_type_result, attr_kind: DW_AT_encoding, unsigned_val: encoding);
  /* _BitInt(N) records its exact bit precision in addition to the
     (rounded-up) byte size.  */
  if (TREE_CODE (type) == BITINT_TYPE)
    add_AT_unsigned (die: base_type_result, attr_kind: DW_AT_bit_size, TYPE_PRECISION (type));

  /* REVERSE means "opposite of the target order", hence the inverted
     choice of DW_END_little/DW_END_big.  */
  if (need_endianity_attribute_p (reverse))
    add_AT_unsigned (die: base_type_result, attr_kind: DW_AT_endianity,
		     BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);

  add_alignment_attribute (base_type_result, type);

  /* Describe the fixed-point scale factor found earlier.  */
  if (fpt_used)
    {
      switch (fpt_info.scale_factor_kind)
	{
	case fixed_point_scale_factor_binary:
	  add_AT_int (die: base_type_result, attr_kind: DW_AT_binary_scale,
		      int_val: fpt_info.scale_factor.binary);
	  break;

	case fixed_point_scale_factor_decimal:
	  add_AT_int (die: base_type_result, attr_kind: DW_AT_decimal_scale,
		      int_val: fpt_info.scale_factor.decimal);
	  break;

	case fixed_point_scale_factor_arbitrary:
	  /* Arbitrary scale factors cannot be described in standard DWARF.  */
	  if (!dwarf_strict)
	    {
	      /* Describe the scale factor as a rational constant.  */
	      const dw_die_ref scale_factor
		= new_die (tag_value: DW_TAG_constant, parent_die: comp_unit_die (), t: type);

	      add_scalar_info (scale_factor, DW_AT_GNU_numerator,
			       fpt_info.scale_factor.arbitrary.numerator,
			       dw_scalar_form_constant, NULL);
	      add_scalar_info (scale_factor, DW_AT_GNU_denominator,
			       fpt_info.scale_factor.arbitrary.denominator,
			       dw_scalar_form_constant, NULL);

	      add_AT_die_ref (die: base_type_result, attr_kind: DW_AT_small, targ_die: scale_factor);
	    }
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Emit the GNU bias extension for biased integer types.  */
  if (type_bias)
    add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
		     dw_scalar_form_constant
		     | dw_scalar_form_exprloc
		     | dw_scalar_form_reference,
		     NULL);

  return base_type_result;
}
13406 | |
13407 | /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM |
13408 | named 'auto' in its type: return true for it, false otherwise. */ |
13409 | |
13410 | static inline bool |
13411 | is_cxx_auto (tree type) |
13412 | { |
13413 | if (is_cxx ()) |
13414 | { |
13415 | tree name = TYPE_IDENTIFIER (type); |
13416 | if (name == get_identifier ("auto" ) |
13417 | || name == get_identifier ("decltype(auto)" )) |
13418 | return true; |
13419 | } |
13420 | return false; |
13421 | } |
13422 | |
/* Given a pointer to an arbitrary ..._TYPE tree node, return true if the
   given input type is a Dwarf "fundamental" (base) type; otherwise return
   false.  */

static inline bool
is_base_type (tree type)
{
  switch (TREE_CODE (type))
    {
    /* These map directly to DW_TAG_base_type (see base_type_die).  */
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case BOOLEAN_TYPE:
    case BITINT_TYPE:
      return true;

    /* Composite, pointer and other derived types all get their own
       DW_TAG_* DIEs and are not base types.  */
    case VOID_TYPE:
    case OPAQUE_TYPE:
    case ARRAY_TYPE:
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ENUMERAL_TYPE:
    case FUNCTION_TYPE:
    case METHOD_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case NULLPTR_TYPE:
    case OFFSET_TYPE:
    case LANG_TYPE:
    case VECTOR_TYPE:
      return false;

    default:
      /* C++ 'auto'/'decltype(auto)' placeholders are TEMPLATE_TYPE_PARMs
	 and thus fall outside the cases above; anything else is a bug.  */
      if (is_cxx_auto (type))
	return false;
      gcc_unreachable ();
    }
}
13462 | |
13463 | /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE |
13464 | node, return the size in bits for the type if it is a constant, or else |
13465 | return the alignment for the type if the type's size is not constant, or |
13466 | else return BITS_PER_WORD if the type actually turns out to be an |
13467 | ERROR_MARK node. */ |
13468 | |
13469 | static inline unsigned HOST_WIDE_INT |
13470 | simple_type_size_in_bits (const_tree type) |
13471 | { |
13472 | if (TREE_CODE (type) == ERROR_MARK) |
13473 | return BITS_PER_WORD; |
13474 | else if (TYPE_SIZE (type) == NULL_TREE) |
13475 | return 0; |
13476 | else if (tree_fits_uhwi_p (TYPE_SIZE (type))) |
13477 | return tree_to_uhwi (TYPE_SIZE (type)); |
13478 | else |
13479 | return TYPE_ALIGN (type); |
13480 | } |
13481 | |
13482 | /* Similarly, but return an offset_int instead of UHWI. */ |
13483 | |
13484 | static inline offset_int |
13485 | offset_int_type_size_in_bits (const_tree type) |
13486 | { |
13487 | if (TREE_CODE (type) == ERROR_MARK) |
13488 | return BITS_PER_WORD; |
13489 | else if (TYPE_SIZE (type) == NULL_TREE) |
13490 | return 0; |
13491 | else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST) |
13492 | return wi::to_offset (TYPE_SIZE (type)); |
13493 | else |
13494 | return TYPE_ALIGN (type); |
13495 | } |
13496 | |
/* Given a pointer to a tree node for a subrange type, return a pointer
   to a DIE that describes the given type.  LOW and HIGH are the bounds
   (either may be null to omit the corresponding attribute), BIAS is an
   optional bias for biased representations (GNU extension), and
   CONTEXT_DIE is the parent DIE (the compilation unit if null).  */

static dw_die_ref
subrange_type_die (tree type, tree low, tree high, tree bias,
		   dw_die_ref context_die)
{
  dw_die_ref subrange_die;
  const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);

  if (context_die == NULL)
    context_die = comp_unit_die ();

  subrange_die = new_die (tag_value: DW_TAG_subrange_type, parent_die: context_die, t: type);

  if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
    {
      /* The size of the subrange type and its base type do not match,
	 so we need to generate a size attribute for the subrange type.  */
      add_AT_unsigned (die: subrange_die, attr_kind: DW_AT_byte_size, unsigned_val: size_in_bytes);
    }

  add_alignment_attribute (subrange_die, type);

  if (low)
    add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
  if (high)
    add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
  /* DW_AT_GNU_bias is a GNU extension, so suppressed in strict mode.  */
  if (bias && !dwarf_strict)
    add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
		     dw_scalar_form_constant
		     | dw_scalar_form_exprloc
		     | dw_scalar_form_reference,
		     NULL);

  return subrange_die;
}
13534 | |
13535 | /* Returns the (const and/or volatile) cv_qualifiers associated with |
13536 | the decl node. This will normally be augmented with the |
13537 | cv_qualifiers of the underlying type in add_type_attribute. */ |
13538 | |
13539 | static int |
13540 | decl_quals (const_tree decl) |
13541 | { |
13542 | return ((TREE_READONLY (decl) |
13543 | /* The C++ front-end correctly marks reference-typed |
13544 | variables as readonly, but from a language (and debug |
13545 | info) standpoint they are not const-qualified. */ |
13546 | && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE |
13547 | ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED) |
13548 | | (TREE_THIS_VOLATILE (decl) |
13549 | ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED)); |
13550 | } |
13551 | |
/* Determine the TYPE whose qualifiers match the largest strict subset
   of the given TYPE_QUALS, and return its qualifiers.  Ignore all
   qualifiers outside QUAL_MASK.  */

static int
get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
{
  tree t;
  int best_rank = 0, best_qual = 0, max_rank;

  type_quals &= qual_mask;
  /* The best achievable rank is one qualifier fewer than TYPE_QUALS has
     (a strict subset); stop the scan early once that is reached.  */
  max_rank = popcount_hwi (x: type_quals) - 1;

  /* Walk the variants of TYPE's main variant looking for an existing
     variant whose qualifiers form the largest strict subset.  */
  for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
       t = TYPE_NEXT_VARIANT (t))
    {
      int q = TYPE_QUALS (t) & qual_mask;

      /* Accept T only if its qualifiers are a strict subset of
	 TYPE_QUALS and it is otherwise the same type.  */
      if ((q & type_quals) == q && q != type_quals
	  && check_base_type (cand: t, base: type))
	{
	  /* Rank variants by how many of the wanted qualifiers they
	     already carry.  */
	  int rank = popcount_hwi (x: q);

	  if (rank > best_rank)
	    {
	      best_rank = rank;
	      best_qual = q;
	    }
	}
    }

  return best_qual;
}
13585 | |
/* Mapping from a cv-qualifier bit to the DWARF tag that expresses it.
   The table order defines the canonical nesting order of qualifier DIEs
   (see modified_type_die).  */
struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
static const dwarf_qual_info_t dwarf_qual_info[] =
{
  { .q: TYPE_QUAL_CONST, .t: DW_TAG_const_type },
  { .q: TYPE_QUAL_VOLATILE, .t: DW_TAG_volatile_type },
  { .q: TYPE_QUAL_RESTRICT, .t: DW_TAG_restrict_type },
  { .q: TYPE_QUAL_ATOMIC, .t: DW_TAG_atomic_type }
};
static const unsigned int dwarf_qual_info_size = ARRAY_SIZE (dwarf_qual_info);
13595 | |
/* If DIE is a qualified DIE of some base DIE with the same parent,
   return the base DIE, otherwise return NULL.  Set MASK to the
   qualifiers added compared to the returned DIE.  DEPTH bounds the
   recursion through chained qualifier DIEs.  */

static dw_die_ref
qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
{
  unsigned int i;
  /* DIE must be one of the qualifier tags in dwarf_qual_info.  */
  for (i = 0; i < dwarf_qual_info_size; i++)
    if (die->die_tag == dwarf_qual_info[i].t)
      break;
  if (i == dwarf_qual_info_size)
    return NULL;
  /* A pure qualifier DIE carries exactly one attribute: DW_AT_type.  */
  if (vec_safe_length (v: die->die_attr) != 1)
    return NULL;
  dw_die_ref type = get_AT_ref (die, attr_kind: DW_AT_type);
  /* Only consider qualified DIEs that share their parent with the
     referenced type DIE.  */
  if (type == NULL || type->die_parent != die->die_parent)
    return NULL;
  *mask |= dwarf_qual_info[i].q;
  /* Recurse through a chain of qualifier DIEs (e.g. const -> volatile
     -> base), accumulating the qualifier bits in MASK.  */
  if (depth)
    {
      dw_die_ref ret = qualified_die_p (die: type, mask, depth: depth - 1);
      if (ret)
	return ret;
    }
  return type;
}
13623 | |
/* If TYPE is long double or complex long double that
   should be emitted as artificial typedef to _Float128 or
   complex _Float128, return the type it should be emitted as.
   This is done in case the target already supports 16-byte
   composite floating point type (ibm_extended_format).  Return
   NULL_TREE when no such typedef treatment applies.  */

static tree
long_double_as_float128 (tree type)
{
  /* Only (complex) long double itself is a candidate.  */
  if (type != long_double_type_node
      && type != complex_long_double_type_node)
    return NULL_TREE;

  machine_mode mode, fmode;
  if (TREE_CODE (type) == COMPLEX_TYPE)
    mode = TYPE_MODE (TREE_TYPE (type));
  else
    mode = TYPE_MODE (type);
  /* TYPE must be a 16-byte non-composite float, and the target must also
     provide some 16-byte composite float mode (e.g. IBM extended) --
     otherwise there is no ambiguity to resolve.  */
  if (known_eq (GET_MODE_SIZE (mode), 16) && !MODE_COMPOSITE_P (mode))
    FOR_EACH_MODE_IN_CLASS (fmode, MODE_FLOAT)
      if (known_eq (GET_MODE_SIZE (fmode), 16)
	  && MODE_COMPOSITE_P (fmode))
	{
	  if (type == long_double_type_node)
	    {
	      /* Use _Float128 if it exists and shares long double's mode.  */
	      if (float128_type_node
		  && (TYPE_MODE (float128_type_node)
		      == TYPE_MODE (type)))
		return float128_type_node;
	      return NULL_TREE;
	    }
	  /* Complex case: look for the complex _FloatN/_FloatNx type with
	     the same mode as complex long double.  */
	  for (int i = 0; i < NUM_FLOATN_NX_TYPES; i++)
	    if (COMPLEX_FLOATN_NX_TYPE_NODE (i) != NULL_TREE
		&& (TYPE_MODE (COMPLEX_FLOATN_NX_TYPE_NODE (i))
		    == TYPE_MODE (type)))
	      return COMPLEX_FLOATN_NX_TYPE_NODE (i);
	}

  return NULL_TREE;
}
13664 | |
13665 | /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging |
13666 | entry that chains the modifiers specified by CV_QUALS in front of the |
13667 | given type. REVERSE is true if the type is to be interpreted in the |
13668 | reverse storage order wrt the target order. */ |
13669 | |
13670 | static dw_die_ref |
13671 | modified_type_die (tree type, int cv_quals, bool reverse, |
13672 | dw_die_ref context_die) |
13673 | { |
13674 | enum tree_code code = TREE_CODE (type); |
13675 | dw_die_ref mod_type_die; |
13676 | dw_die_ref sub_die = NULL; |
13677 | tree item_type = NULL; |
13678 | tree qualified_type; |
13679 | tree name, low, high; |
13680 | dw_die_ref mod_scope; |
13681 | struct array_descr_info info; |
13682 | /* Only these cv-qualifiers are currently handled. */ |
13683 | const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE |
13684 | | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC | |
13685 | ENCODE_QUAL_ADDR_SPACE(~0U)); |
13686 | /* DW_AT_endianity is specified only for base types in the standard. */ |
13687 | const bool reverse_type |
13688 | = need_endianity_attribute_p (reverse) |
13689 | && (is_base_type (type) |
13690 | || (TREE_CODE (type) == ENUMERAL_TYPE && !dwarf_strict)); |
13691 | |
13692 | if (code == ERROR_MARK) |
13693 | return NULL; |
13694 | |
13695 | if (lang_hooks.types.get_debug_type) |
13696 | { |
13697 | tree debug_type = lang_hooks.types.get_debug_type (type); |
13698 | |
13699 | if (debug_type != NULL_TREE && debug_type != type) |
13700 | return modified_type_die (type: debug_type, cv_quals, reverse, context_die); |
13701 | } |
13702 | |
13703 | cv_quals &= cv_qual_mask; |
13704 | |
13705 | /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type |
13706 | tag modifier (and not an attribute) old consumers won't be able |
13707 | to handle it. */ |
13708 | if (dwarf_version < 3) |
13709 | cv_quals &= ~TYPE_QUAL_RESTRICT; |
13710 | |
13711 | /* Likewise for DW_TAG_atomic_type for DWARFv5. */ |
13712 | if (dwarf_version < 5) |
13713 | cv_quals &= ~TYPE_QUAL_ATOMIC; |
13714 | |
13715 | /* See if we already have the appropriately qualified variant of |
13716 | this type. */ |
13717 | qualified_type = get_qualified_type (type, cv_quals); |
13718 | |
13719 | if (qualified_type == sizetype) |
13720 | { |
13721 | /* Try not to expose the internal sizetype type's name. */ |
13722 | if (TYPE_NAME (qualified_type) |
13723 | && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL) |
13724 | { |
13725 | tree t = TREE_TYPE (TYPE_NAME (qualified_type)); |
13726 | |
13727 | gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE |
13728 | && (TYPE_PRECISION (t) |
13729 | == TYPE_PRECISION (qualified_type)) |
13730 | && (TYPE_UNSIGNED (t) |
13731 | == TYPE_UNSIGNED (qualified_type))); |
13732 | qualified_type = t; |
13733 | } |
13734 | else if (qualified_type == sizetype |
13735 | && TREE_CODE (sizetype) == TREE_CODE (size_type_node) |
13736 | && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node) |
13737 | && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node)) |
13738 | qualified_type = size_type_node; |
13739 | if (type == sizetype) |
13740 | type = qualified_type; |
13741 | } |
13742 | |
13743 | /* If we do, then we can just use its DIE, if it exists. */ |
13744 | if (qualified_type) |
13745 | { |
13746 | mod_type_die = lookup_type_die (type: qualified_type); |
13747 | |
13748 | /* DW_AT_endianity doesn't come from a qualifier on the type, so it is |
13749 | dealt with specially: the DIE with the attribute, if it exists, is |
13750 | placed immediately after the regular DIE for the same type. */ |
13751 | if (mod_type_die |
13752 | && (!reverse_type |
13753 | || ((mod_type_die = mod_type_die->die_sib) != NULL |
13754 | && get_AT_unsigned (die: mod_type_die, attr_kind: DW_AT_endianity)))) |
13755 | return mod_type_die; |
13756 | } |
13757 | |
13758 | name = qualified_type ? TYPE_NAME (qualified_type) : NULL; |
13759 | |
13760 | /* Handle C typedef types. */ |
13761 | if (name |
13762 | && TREE_CODE (name) == TYPE_DECL |
13763 | && DECL_ORIGINAL_TYPE (name) |
13764 | && !DECL_ARTIFICIAL (name)) |
13765 | { |
13766 | tree dtype = TREE_TYPE (name); |
13767 | |
13768 | /* Skip the typedef for base types with DW_AT_endianity, no big deal. */ |
13769 | if (qualified_type == dtype && !reverse_type) |
13770 | { |
13771 | tree origin = decl_ultimate_origin (decl: name); |
13772 | |
13773 | /* Typedef variants that have an abstract origin don't get their own |
13774 | type DIE (see gen_typedef_die), so fall back on the ultimate |
13775 | abstract origin instead. */ |
13776 | if (origin != NULL && origin != name) |
13777 | return modified_type_die (TREE_TYPE (origin), cv_quals, reverse, |
13778 | context_die); |
13779 | |
13780 | /* For a named type, use the typedef. */ |
13781 | gen_type_die (qualified_type, context_die); |
13782 | return lookup_type_die (type: qualified_type); |
13783 | } |
13784 | else |
13785 | { |
13786 | int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype); |
13787 | dquals &= cv_qual_mask; |
13788 | if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED |
13789 | || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type)) |
13790 | /* cv-unqualified version of named type. Just use |
13791 | the unnamed type to which it refers. */ |
13792 | return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals, |
13793 | reverse, context_die); |
13794 | /* Else cv-qualified version of named type; fall through. */ |
13795 | } |
13796 | } |
13797 | |
13798 | mod_scope = scope_die_for (type, context_die); |
13799 | |
13800 | if (cv_quals) |
13801 | { |
13802 | int sub_quals = 0, first_quals = 0; |
13803 | unsigned i; |
13804 | dw_die_ref first = NULL, last = NULL; |
13805 | |
13806 | /* Determine a lesser qualified type that most closely matches |
13807 | this one. Then generate DW_TAG_* entries for the remaining |
13808 | qualifiers. */ |
13809 | sub_quals = get_nearest_type_subqualifiers (type, type_quals: cv_quals, |
13810 | qual_mask: cv_qual_mask); |
13811 | if (sub_quals && use_debug_types) |
13812 | { |
13813 | bool needed = false; |
13814 | /* If emitting type units, make sure the order of qualifiers |
13815 | is canonical. Thus, start from unqualified type if |
13816 | an earlier qualifier is missing in sub_quals, but some later |
13817 | one is present there. */ |
13818 | for (i = 0; i < dwarf_qual_info_size; i++) |
13819 | if (dwarf_qual_info[i].q & cv_quals & ~sub_quals) |
13820 | needed = true; |
13821 | else if (needed && (dwarf_qual_info[i].q & cv_quals)) |
13822 | { |
13823 | sub_quals = 0; |
13824 | break; |
13825 | } |
13826 | } |
13827 | mod_type_die = modified_type_die (type, cv_quals: sub_quals, reverse, context_die); |
13828 | if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope) |
13829 | { |
13830 | /* As not all intermediate qualified DIEs have corresponding |
13831 | tree types, ensure that qualified DIEs in the same scope |
13832 | as their DW_AT_type are emitted after their DW_AT_type, |
13833 | only with other qualified DIEs for the same type possibly |
13834 | in between them. Determine the range of such qualified |
13835 | DIEs now (first being the base type, last being corresponding |
13836 | last qualified DIE for it). */ |
13837 | unsigned int count = 0; |
13838 | first = qualified_die_p (die: mod_type_die, mask: &first_quals, |
13839 | depth: dwarf_qual_info_size); |
13840 | if (first == NULL) |
13841 | first = mod_type_die; |
13842 | gcc_assert ((first_quals & ~sub_quals) == 0); |
13843 | for (count = 0, last = first; |
13844 | count < (1U << dwarf_qual_info_size); |
13845 | count++, last = last->die_sib) |
13846 | { |
13847 | int quals = 0; |
13848 | if (last == mod_scope->die_child) |
13849 | break; |
13850 | if (qualified_die_p (die: last->die_sib, mask: &quals, depth: dwarf_qual_info_size) |
13851 | != first) |
13852 | break; |
13853 | } |
13854 | } |
13855 | |
13856 | for (i = 0; i < dwarf_qual_info_size; i++) |
13857 | if (dwarf_qual_info[i].q & cv_quals & ~sub_quals) |
13858 | { |
13859 | dw_die_ref d; |
13860 | if (first && first != last) |
13861 | { |
13862 | for (d = first->die_sib; ; d = d->die_sib) |
13863 | { |
13864 | int quals = 0; |
13865 | qualified_die_p (die: d, mask: &quals, depth: dwarf_qual_info_size); |
13866 | if (quals == (first_quals | dwarf_qual_info[i].q)) |
13867 | break; |
13868 | if (d == last) |
13869 | { |
13870 | d = NULL; |
13871 | break; |
13872 | } |
13873 | } |
13874 | if (d) |
13875 | { |
13876 | mod_type_die = d; |
13877 | continue; |
13878 | } |
13879 | } |
13880 | if (first) |
13881 | { |
13882 | d = new_die_raw (tag_value: dwarf_qual_info[i].t); |
13883 | add_child_die_after (die: mod_scope, child_die: d, after_die: last); |
13884 | last = d; |
13885 | } |
13886 | else |
13887 | d = new_die (tag_value: dwarf_qual_info[i].t, parent_die: mod_scope, t: type); |
13888 | if (mod_type_die) |
13889 | add_AT_die_ref (die: d, attr_kind: DW_AT_type, targ_die: mod_type_die); |
13890 | mod_type_die = d; |
13891 | first_quals |= dwarf_qual_info[i].q; |
13892 | } |
13893 | } |
13894 | else if (code == POINTER_TYPE || code == REFERENCE_TYPE) |
13895 | { |
13896 | dwarf_tag tag = DW_TAG_pointer_type; |
13897 | if (code == REFERENCE_TYPE) |
13898 | { |
13899 | if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4) |
13900 | tag = DW_TAG_rvalue_reference_type; |
13901 | else |
13902 | tag = DW_TAG_reference_type; |
13903 | } |
13904 | mod_type_die = new_die (tag_value: tag, parent_die: mod_scope, t: type); |
13905 | |
13906 | add_AT_unsigned (die: mod_type_die, attr_kind: DW_AT_byte_size, |
13907 | unsigned_val: simple_type_size_in_bits (type) / BITS_PER_UNIT); |
13908 | add_alignment_attribute (mod_type_die, type); |
13909 | item_type = TREE_TYPE (type); |
13910 | |
13911 | addr_space_t as = TYPE_ADDR_SPACE (item_type); |
13912 | if (!ADDR_SPACE_GENERIC_P (as)) |
13913 | { |
13914 | int action = targetm.addr_space.debug (as); |
13915 | if (action >= 0) |
13916 | { |
13917 | /* Positive values indicate an address_class. */ |
13918 | add_AT_unsigned (die: mod_type_die, attr_kind: DW_AT_address_class, unsigned_val: action); |
13919 | } |
13920 | else |
13921 | { |
13922 | /* Negative values indicate an (inverted) segment base reg. */ |
13923 | dw_loc_descr_ref d |
13924 | = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED); |
13925 | add_AT_loc (die: mod_type_die, attr_kind: DW_AT_segment, loc: d); |
13926 | } |
13927 | } |
13928 | } |
13929 | else if (code == ARRAY_TYPE |
13930 | || (lang_hooks.types.get_array_descr_info |
13931 | && lang_hooks.types.get_array_descr_info (type, &info))) |
13932 | { |
13933 | gen_type_die (type, context_die); |
13934 | return lookup_type_die (type); |
13935 | } |
13936 | else if (code == INTEGER_TYPE |
13937 | && TREE_TYPE (type) != NULL_TREE |
13938 | && subrange_type_for_debug_p (type, &low, &high)) |
13939 | { |
13940 | tree bias = NULL_TREE; |
13941 | if (lang_hooks.types.get_type_bias) |
13942 | bias = lang_hooks.types.get_type_bias (type); |
13943 | mod_type_die = subrange_type_die (type, low, high, bias, context_die); |
13944 | item_type = TREE_TYPE (type); |
13945 | } |
13946 | else if (is_base_type (type)) |
13947 | { |
13948 | /* If a target supports long double as different floating point |
13949 | modes with the same 16-byte size, use normal DW_TAG_base_type |
13950 | only for the composite (ibm_extended_real_format) type and |
13951 | for the other for the time being emit instead a "_Float128" |
13952 | or "complex _Float128" DW_TAG_base_type and a "long double" |
13953 | or "complex long double" typedef to it. */ |
13954 | if (tree other_type = long_double_as_float128 (type)) |
13955 | { |
13956 | dw_die_ref other_die; |
13957 | if (TYPE_NAME (other_type)) |
13958 | other_die |
13959 | = modified_type_die (type: other_type, cv_quals: TYPE_UNQUALIFIED, reverse, |
13960 | context_die); |
13961 | else |
13962 | { |
13963 | other_die = base_type_die (type, reverse); |
13964 | add_child_die (die: comp_unit_die (), child_die: other_die); |
13965 | add_name_attribute (other_die, |
13966 | TREE_CODE (type) == COMPLEX_TYPE |
13967 | ? "complex _Float128" : "_Float128" ); |
13968 | } |
13969 | mod_type_die = new_die_raw (tag_value: DW_TAG_typedef); |
13970 | add_AT_die_ref (die: mod_type_die, attr_kind: DW_AT_type, targ_die: other_die); |
13971 | } |
13972 | else |
13973 | mod_type_die = base_type_die (type, reverse); |
13974 | |
13975 | /* The DIE with DW_AT_endianity is placed right after the naked DIE. */ |
13976 | if (reverse_type) |
13977 | { |
13978 | dw_die_ref after_die |
13979 | = modified_type_die (type, cv_quals, reverse: false, context_die); |
13980 | add_child_die_after (die: comp_unit_die (), child_die: mod_type_die, after_die); |
13981 | } |
13982 | else |
13983 | add_child_die (die: comp_unit_die (), child_die: mod_type_die); |
13984 | |
13985 | add_pubtype (decl: type, die: mod_type_die); |
13986 | } |
13987 | else |
13988 | { |
13989 | /* The DIE with DW_AT_endianity is placed right after the naked DIE. */ |
13990 | if (reverse_type) |
13991 | { |
13992 | dw_die_ref after_die |
13993 | = modified_type_die (type, cv_quals, reverse: false, context_die); |
13994 | gen_type_die (type, context_die, true); |
13995 | gcc_assert (after_die->die_sib |
13996 | && get_AT_unsigned (after_die->die_sib, DW_AT_endianity)); |
13997 | return after_die->die_sib; |
13998 | } |
13999 | |
14000 | gen_type_die (type, context_die); |
14001 | |
14002 | /* We have to get the type_main_variant here (and pass that to the |
14003 | `lookup_type_die' routine) because the ..._TYPE node we have |
14004 | might simply be a *copy* of some original type node (where the |
14005 | copy was created to help us keep track of typedef names) and |
14006 | that copy might have a different TYPE_UID from the original |
14007 | ..._TYPE node. */ |
14008 | if (code == FUNCTION_TYPE || code == METHOD_TYPE) |
14009 | { |
14010 | /* For function/method types, can't just use type_main_variant here, |
14011 | because that can have different ref-qualifiers for C++, |
14012 | but try to canonicalize. */ |
14013 | tree main = TYPE_MAIN_VARIANT (type); |
14014 | for (tree t = main; t; t = TYPE_NEXT_VARIANT (t)) |
14015 | if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0 |
14016 | && check_base_type (cand: t, base: main) |
14017 | && check_lang_type (cand: t, base: type)) |
14018 | return lookup_type_die (type: t); |
14019 | return lookup_type_die (type); |
14020 | } |
14021 | /* Vectors have the debugging information in the type, |
14022 | not the main variant. */ |
14023 | else if (code == VECTOR_TYPE) |
14024 | return lookup_type_die (type); |
14025 | else |
14026 | return lookup_type_die (type: type_main_variant (type)); |
14027 | } |
14028 | |
14029 | /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those, |
14030 | don't output a DW_TAG_typedef, since there isn't one in the |
14031 | user's program; just attach a DW_AT_name to the type. |
14032 | Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type |
14033 | if the base type already has the same name. */ |
14034 | if (name |
14035 | && ((TREE_CODE (name) != TYPE_DECL |
14036 | && (qualified_type == TYPE_MAIN_VARIANT (type) |
14037 | || (cv_quals == TYPE_UNQUALIFIED))) |
14038 | || (TREE_CODE (name) == TYPE_DECL |
14039 | && TREE_TYPE (name) == qualified_type |
14040 | && DECL_NAME (name)))) |
14041 | { |
14042 | if (TREE_CODE (name) == TYPE_DECL) |
14043 | /* Could just call add_name_and_src_coords_attributes here, |
14044 | but since this is a builtin type it doesn't have any |
14045 | useful source coordinates anyway. */ |
14046 | name = DECL_NAME (name); |
14047 | add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name)); |
14048 | } |
14049 | else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type) |
14050 | { |
14051 | if (TREE_CODE (type) == BITINT_TYPE) |
14052 | { |
14053 | char name_buf[sizeof ("unsigned _BitInt(2147483647)" )]; |
14054 | snprintf (s: name_buf, maxlen: sizeof (name_buf), |
14055 | format: "%s_BitInt(%d)" , TYPE_UNSIGNED (type) ? "unsigned " : "" , |
14056 | TYPE_PRECISION (type)); |
14057 | add_name_attribute (mod_type_die, name_buf); |
14058 | } |
14059 | else |
14060 | { |
14061 | /* This probably indicates a bug. */ |
14062 | name = TYPE_IDENTIFIER (type); |
14063 | add_name_attribute (mod_type_die, |
14064 | name |
14065 | ? IDENTIFIER_POINTER (name) : "__unknown__" ); |
14066 | } |
14067 | } |
14068 | |
14069 | if (qualified_type && !reverse_type) |
14070 | equate_type_number_to_die (type: qualified_type, type_die: mod_type_die); |
14071 | |
14072 | if (item_type) |
14073 | /* We must do this after the equate_type_number_to_die call, in case |
14074 | this is a recursive type. This ensures that the modified_type_die |
14075 | recursion will terminate even if the type is recursive. Recursive |
14076 | types are possible in Ada. */ |
14077 | sub_die = modified_type_die (type: item_type, |
14078 | TYPE_QUALS_NO_ADDR_SPACE (item_type), |
14079 | reverse, |
14080 | context_die); |
14081 | |
14082 | if (sub_die != NULL) |
14083 | add_AT_die_ref (die: mod_type_die, attr_kind: DW_AT_type, targ_die: sub_die); |
14084 | |
14085 | add_gnat_descriptive_type_attribute (mod_type_die, type, context_die); |
14086 | if (TYPE_ARTIFICIAL (type)) |
14087 | add_AT_flag (die: mod_type_die, attr_kind: DW_AT_artificial, flag: 1); |
14088 | |
14089 | return mod_type_die; |
14090 | } |
14091 | |
14092 | /* Generate DIEs for the generic parameters of T. |
14093 | T must be either a generic type or a generic function. |
14094 | See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */ |
14095 | |
static void
gen_generic_params_dies (tree t)
{
  tree parms, args;
  int parms_num, i;
  dw_die_ref die = NULL;
  int non_default;

  /* Nothing to do without a node, or for a type that is not yet
     complete.  */
  if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
    return;

  /* The DIE for T must already have been created; the parameter DIEs
     are emitted as its children.  */
  if (TYPE_P (t))
    die = lookup_type_die (type: t);
  else if (DECL_P (t))
    die = lookup_decl_die (decl: t);

  gcc_assert (die);

  parms = lang_hooks.get_innermost_generic_parms (t);
  if (!parms)
    /* T has no generic parameter. It means T is neither a generic type
       or function. End of story.  */
    return;

  parms_num = TREE_VEC_LENGTH (parms);
  args = lang_hooks.get_innermost_generic_args (t);
  /* The front end may chain an INTEGER_CST onto ARGS holding the number
     of arguments that were given explicitly (the remainder came from
     default arguments); otherwise treat every argument as explicit.  */
  if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
    non_default = int_cst_value (TREE_CHAIN (args));
  else
    non_default = TREE_VEC_LENGTH (args);
  for (i = 0; i < parms_num; i++)
    {
      tree parm, arg, arg_pack_elems;
      dw_die_ref parm_die;

      parm = TREE_VEC_ELT (parms, i);
      arg = TREE_VEC_ELT (args, i);
      arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
      gcc_assert (parm && TREE_VALUE (parm) && arg);

      /* NOTE(review): this condition duplicates the assertion just above,
	 so it is always true when checking is enabled.  */
      if (parm && TREE_VALUE (parm) && arg)
	{
	  /* If PARM represents a template parameter pack,
	     emit a DW_TAG_GNU_template_parameter_pack DIE, followed
	     by DW_TAG_template_*_parameter DIEs for the argument
	     pack elements of ARG. Note that ARG would then be
	     an argument pack.  */
	  if (arg_pack_elems)
	    parm_die = template_parameter_pack_die (TREE_VALUE (parm),
						    arg_pack_elems,
						    die);
	  else
	    parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
					      true /* emit name */, die);
	  /* Arguments that came from default values are flagged so the
	     debugger can omit them when reconstructing the name.  */
	  if (i >= non_default)
	    add_AT_flag (die: parm_die, attr_kind: DW_AT_default_value, flag: 1);
	}
    }
}
14155 | |
14156 | /* Create and return a DIE for PARM which should be |
14157 | the representation of a generic type parameter. |
14158 | For instance, in the C++ front end, PARM would be a template parameter. |
14159 | ARG is the argument to PARM. |
   EMIT_NAME_P if true, the DIE will have DW_AT_name attribute set to the
14161 | name of the PARM. |
14162 | PARENT_DIE is the parent DIE which the new created DIE should be added to, |
14163 | as a child node. */ |
14164 | |
static dw_die_ref
generic_parameter_die (tree parm, tree arg,
		       bool emit_name_p,
		       dw_die_ref parent_die)
{
  dw_die_ref tmpl_die = NULL;
  const char *name = NULL;

  /* C++20 accepts class literals as template parameters, and var
     decls with initializers represent them.  The VAR_DECLs would be
     rejected, but we can take the DECL_INITIAL constructor and
     attempt to expand it.  */
  if (arg && VAR_P (arg))
    arg = DECL_INITIAL (arg);

  /* Without a named parameter and an argument there is nothing
     meaningful to describe.  */
  if (!parm || !DECL_NAME (parm) || !arg)
    return NULL;

  /* We support non-type generic parameters and arguments,
     type generic parameters and arguments, as well as
     generic generic parameters (a.k.a. template template parameters in C++)
     and arguments.  */
  if (TREE_CODE (parm) == PARM_DECL)
    /* PARM is a nontype generic parameter  */
    tmpl_die = new_die (tag_value: DW_TAG_template_value_param, parent_die, t: parm);
  else if (TREE_CODE (parm) == TYPE_DECL)
    /* PARM is a type generic parameter.  */
    tmpl_die = new_die (tag_value: DW_TAG_template_type_param, parent_die, t: parm);
  else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
    /* PARM is a generic generic parameter.
       Its DIE is a GNU extension. It shall have a
       DW_AT_name attribute to represent the name of the template template
       parameter, and a DW_AT_GNU_template_name attribute to represent the
       name of the template template argument.  */
    tmpl_die = new_die (tag_value: DW_TAG_GNU_template_template_param,
			parent_die, t: parm);
  else
    gcc_unreachable ();

  if (tmpl_die)
    {
      tree tmpl_type;

      /* If PARM is a generic parameter pack, it means we are
	 emitting debug info for a template argument pack element.
	 In other terms, ARG is a template argument pack element.
	 In that case, we don't emit any DW_AT_name attribute for
	 the die.  */
      if (emit_name_p)
	{
	  name = IDENTIFIER_POINTER (DECL_NAME (parm));
	  gcc_assert (name);
	  add_AT_string (die: tmpl_die, attr_kind: DW_AT_name, str: name);
	}

      if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
	{
	  /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
	     TMPL_DIE should have a child DW_AT_type attribute that is set
	     to the type of the argument to PARM, which is ARG.
	     If PARM is a type generic parameter, TMPL_DIE should have a
	     child DW_AT_type that is set to ARG.  */
	  tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
	  add_type_attribute (tmpl_die, tmpl_type,
			      (TREE_THIS_VOLATILE (tmpl_type)
			       ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
			      false, parent_die);
	}
      else
	{
	  /* So TMPL_DIE is a DIE representing a generic generic template
	     parameter, a.k.a template template parameter in C++, and ARG
	     is a template.  */

	  /* The DW_AT_GNU_template_name attribute of the DIE must be set
	     to the name of the argument.  */
	  name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, scope: 1);
	  if (name)
	    add_AT_string (die: tmpl_die, attr_kind: DW_AT_GNU_template_name, str: name);
	}

      if (TREE_CODE (parm) == PARM_DECL)
	/* So PARM is a non-type generic parameter.
	   DWARF3 5.6.8 says we must set a DW_AT_const_value child
	   attribute of TMPL_DIE which value represents the value
	   of ARG.
	   We must be careful here:
	   The value of ARG might reference some function decls.
	   We might currently be emitting debug info for a generic
	   type and types are emitted before function decls, we don't
	   know if the function decls referenced by ARG will actually be
	   emitted after cgraph computations.
	   So must defer the generation of the DW_AT_const_value to
	   after cgraph is ready.  */
	append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
    }

  return tmpl_die;
}
14264 | |
/* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
   PARM_PACK, which must be a template parameter pack, with arguments given by
   PARM_PACK_ARGS.  The returned DIE will be a child DIE of PARENT_DIE.  */
14268 | |
14269 | static dw_die_ref |
14270 | template_parameter_pack_die (tree parm_pack, |
14271 | tree parm_pack_args, |
14272 | dw_die_ref parent_die) |
14273 | { |
14274 | dw_die_ref die; |
14275 | int j; |
14276 | |
14277 | gcc_assert (parent_die && parm_pack); |
14278 | |
14279 | die = new_die (tag_value: DW_TAG_GNU_template_parameter_pack, parent_die, t: parm_pack); |
14280 | add_name_and_src_coords_attributes (die, parm_pack); |
14281 | for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++) |
14282 | generic_parameter_die (parm: parm_pack, |
14283 | TREE_VEC_ELT (parm_pack_args, j), |
14284 | emit_name_p: false /* Don't emit DW_AT_name */, |
14285 | parent_die: die); |
14286 | return die; |
14287 | } |
14288 | |
14289 | /* Return the debugger register number described by a given RTL node. */ |
14290 | |
14291 | static unsigned int |
14292 | debugger_reg_number (const_rtx rtl) |
14293 | { |
14294 | unsigned regno = REGNO (rtl); |
14295 | |
14296 | gcc_assert (regno < FIRST_PSEUDO_REGISTER); |
14297 | |
14298 | #ifdef LEAF_REG_REMAP |
14299 | if (crtl->uses_only_leaf_regs) |
14300 | { |
14301 | int leaf_reg = LEAF_REG_REMAP (regno); |
14302 | if (leaf_reg != -1) |
14303 | regno = (unsigned) leaf_reg; |
14304 | } |
14305 | #endif |
14306 | |
14307 | regno = DEBUGGER_REGNO (regno); |
14308 | gcc_assert (regno != INVALID_REGNUM); |
14309 | return regno; |
14310 | } |
14311 | |
14312 | /* Optionally add a DW_OP_piece term to a location description expression. |
14313 | DW_OP_piece is only added if the location description expression already |
14314 | doesn't end with DW_OP_piece. */ |
14315 | |
14316 | static void |
14317 | add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size) |
14318 | { |
14319 | dw_loc_descr_ref loc; |
14320 | |
14321 | if (*list_head != NULL) |
14322 | { |
14323 | /* Find the end of the chain. */ |
14324 | for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next) |
14325 | ; |
14326 | |
14327 | if (loc->dw_loc_opc != DW_OP_piece) |
14328 | loc->dw_loc_next = new_loc_descr (op: DW_OP_piece, oprnd1: size, oprnd2: 0); |
14329 | } |
14330 | } |
14331 | |
14332 | /* Return a location descriptor that designates a machine register or |
14333 | zero if there is none. */ |
14334 | |
static dw_loc_descr_ref
reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
{
  rtx regs;

  /* Pseudo registers have no DWARF encoding.  */
  if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
    return 0;

  /* We only use "frame base" when we're sure we're talking about the
     post-prologue local stack frame.  We do this by *not* running
     register elimination until this point, and recognizing the special
     argument pointer and soft frame pointer rtx's.
     Use DW_OP_fbreg offset DW_OP_stack_value in this case.  */
  if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
      && (ira_use_lra_p
	  ? lra_eliminate_regs (rtl, VOIDmode, NULL_RTX)
	  : eliminate_regs (rtl, VOIDmode, NULL_RTX)) != rtl)
    {
      dw_loc_descr_ref result = NULL;

      /* DW_OP_stack_value is a DWARF 4 opcode; only emit it when the
	 requested version allows it or strict mode is off.  */
      if (dwarf_version >= 4 || !dwarf_strict)
	{
	  result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
				       initialized);
	  if (result)
	    add_loc_descr (list_head: &result,
			   descr: new_loc_descr (op: DW_OP_stack_value, oprnd1: 0, oprnd2: 0));
	}
      return result;
    }

  /* Let the target describe values spread over several registers.  */
  regs = targetm.dwarf_register_span (rtl);

  if (REG_NREGS (rtl) > 1 || regs)
    return multiple_reg_loc_descriptor (rtl, regs, initialized);
  else
    {
      unsigned int debugger_regnum = debugger_reg_number (rtl);
      /* Registers the mapping deliberately ignores get no location.  */
      if (debugger_regnum == IGNORED_DWARF_REGNUM)
	return 0;
      return one_reg_loc_descriptor (debugger_regnum, initialized);
    }
}
14378 | |
14379 | /* Return a location descriptor that designates a machine register for |
14380 | a given hard register number. */ |
14381 | |
14382 | static dw_loc_descr_ref |
14383 | one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized) |
14384 | { |
14385 | dw_loc_descr_ref reg_loc_descr; |
14386 | |
14387 | if (regno <= 31) |
14388 | reg_loc_descr |
14389 | = new_loc_descr (op: (enum dwarf_location_atom) (DW_OP_reg0 + regno), oprnd1: 0, oprnd2: 0); |
14390 | else |
14391 | reg_loc_descr = new_loc_descr (op: DW_OP_regx, oprnd1: regno, oprnd2: 0); |
14392 | |
14393 | if (initialized == VAR_INIT_STATUS_UNINITIALIZED) |
14394 | add_loc_descr (list_head: ®_loc_descr, descr: new_loc_descr (op: DW_OP_GNU_uninit, oprnd1: 0, oprnd2: 0)); |
14395 | |
14396 | return reg_loc_descr; |
14397 | } |
14398 | |
14399 | /* Given an RTL of a register, return a location descriptor that |
14400 | designates a value that spans more than one register. */ |
14401 | |
static dw_loc_descr_ref
multiple_reg_loc_descriptor (rtx rtl, rtx regs,
			     enum var_init_status initialized)
{
  int size, i;
  dw_loc_descr_ref loc_result = NULL;

  /* Simple, contiguous registers.  */
  if (regs == NULL_RTX)
    {
      unsigned reg = REGNO (rtl);
      int nregs;

#ifdef LEAF_REG_REMAP
      /* Undo leaf-function register renumbering before mapping to
	 debugger register numbers.  */
      if (crtl->uses_only_leaf_regs)
	{
	  int leaf_reg = LEAF_REG_REMAP (reg);
	  if (leaf_reg != -1)
	    reg = (unsigned) leaf_reg;
	}
#endif

      gcc_assert ((unsigned) DEBUGGER_REGNO (reg) == debugger_reg_number (rtl));
      nregs = REG_NREGS (rtl);

      /* At present we only track constant-sized pieces.  */
      if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (const_value: &size))
	return NULL;
      /* Each hard register holds an equal share of the value.  */
      size /= nregs;

      /* Emit one register operation plus a DW_OP_piece per register.  */
      loc_result = NULL;
      while (nregs--)
	{
	  dw_loc_descr_ref t;

	  t = one_reg_loc_descriptor (DEBUGGER_REGNO (reg),
				      initialized: VAR_INIT_STATUS_INITIALIZED);
	  add_loc_descr (list_head: &loc_result, descr: t);
	  add_loc_descr_op_piece (list_head: &loc_result, size);
	  ++reg;
	}
      return loc_result;
    }

  /* Now onto stupid register sets in non contiguous locations.  */

  gcc_assert (GET_CODE (regs) == PARALLEL);

  /* At present we only track constant-sized pieces.  */
  if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (const_value: &size))
    return NULL;
  loc_result = NULL;

  /* One piece per element of the target-provided PARALLEL span.  */
  for (i = 0; i < XVECLEN (regs, 0); ++i)
    {
      dw_loc_descr_ref t;

      t = one_reg_loc_descriptor (regno: debugger_reg_number (XVECEXP (regs, 0, i)),
				  initialized: VAR_INIT_STATUS_INITIALIZED);
      add_loc_descr (list_head: &loc_result, descr: t);
      add_loc_descr_op_piece (list_head: &loc_result, size);
    }

  /* Mark the whole composite as uninitialized if requested.  */
  if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
    add_loc_descr (list_head: &loc_result, descr: new_loc_descr (op: DW_OP_GNU_uninit, oprnd1: 0, oprnd2: 0));
  return loc_result;
}
14469 | |
14470 | static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT); |
14471 | |
14472 | /* Return a location descriptor that designates a constant i, |
14473 | as a compound operation from constant (i >> shift), constant shift |
14474 | and DW_OP_shl. */ |
14475 | |
14476 | static dw_loc_descr_ref |
14477 | int_shift_loc_descriptor (HOST_WIDE_INT i, int shift) |
14478 | { |
14479 | dw_loc_descr_ref ret = int_loc_descriptor (i >> shift); |
14480 | add_loc_descr (list_head: &ret, descr: int_loc_descriptor (shift)); |
14481 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0)); |
14482 | return ret; |
14483 | } |
14484 | |
14485 | /* Return a location descriptor that designates constant POLY_I. */ |
14486 | |
static dw_loc_descr_ref
int_loc_descriptor (poly_int64 poly_i)
{
  enum dwarf_location_atom op;

  HOST_WIDE_INT i;
  if (!poly_i.is_constant (const_value: &i))
    {
      /* Create location descriptions for the non-constant part and
	 add any constant offset at the end.  */
      dw_loc_descr_ref ret = NULL;
      HOST_WIDE_INT constant = poly_i.coeffs[0];
      for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
	{
	  HOST_WIDE_INT coeff = poly_i.coeffs[j];
	  if (coeff != 0)
	    {
	      dw_loc_descr_ref start = ret;
	      unsigned int factor;
	      int bias;
	      /* Ask the target which register encodes this runtime
		 indeterminate, and how to scale/bias it.  */
	      unsigned int regno = targetm.dwarf_poly_indeterminate_value
		(j, &factor, &bias);

	      /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
		 add COEFF * (REGNO / FACTOR) now and subtract
		 COEFF * BIAS from the final constant part.  */
	      constant -= coeff * bias;
	      add_loc_descr (list_head: &ret, descr: new_reg_loc_descr (reg: regno, offset: 0));
	      if (coeff % factor == 0)
		coeff /= factor;
	      else
		{
		  /* FACTOR must be a power of two here; divide via a
		     right shift on the DWARF stack.  */
		  int amount = exact_log2 (x: factor);
		  gcc_assert (amount >= 0);
		  add_loc_descr (list_head: &ret, descr: int_loc_descriptor (poly_i: amount));
		  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_shr, oprnd1: 0, oprnd2: 0));
		}
	      if (coeff != 1)
		{
		  add_loc_descr (list_head: &ret, descr: int_loc_descriptor (poly_i: coeff));
		  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_mul, oprnd1: 0, oprnd2: 0));
		}
	      /* Accumulate into the running sum if this is not the
		 first term.  */
	      if (start)
		add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0));
	    }
	}
      loc_descr_plus_const (list_head: &ret, poly_offset: constant);
      return ret;
    }

  /* Pick the smallest representation of a constant, rather than just
     defaulting to the LEB encoding.  */
  if (i >= 0)
    {
      int clz = clz_hwi (x: i);
      int ctz = ctz_hwi (x: i);
      /* 0..31 fit in a single DW_OP_litN byte.  */
      if (i <= 31)
	op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
      else if (i <= 0xff)
	op = DW_OP_const1u;
      else if (i <= 0xffff)
	op = DW_OP_const2u;
      else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
	       && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
	/* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
	   DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
	   while DW_OP_const4u is 5 bytes.  */
	return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
      else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
	       && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
	/* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
	   while DW_OP_const4u is 5 bytes.  */
	return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);

      else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
	       && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
		  <= 4)
	{
	  /* As i >= 2**31, the double cast above will yield a negative number.
	     Since wrapping is defined in DWARF expressions we can output big
	     positive integers as small negative ones, regardless of the size
	     of host wide ints.

	     Here, since the evaluator will handle 32-bit values and since i >=
	     2**31, we know it's going to be interpreted as a negative literal:
	     store it this way if we can do better than 5 bytes this way.  */
	  return int_loc_descriptor (poly_i: (HOST_WIDE_INT) (int32_t) i);
	}
      else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
	op = DW_OP_const4u;

      /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
	 least 6 bytes: see if we can do better before falling back to it.  */
      else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
	       && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
	/* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes.  */
	return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
      else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
	       && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
		  >= HOST_BITS_PER_WIDE_INT)
	/* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
	   DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes.  */
	return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
      else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
	       && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
	       && size_of_uleb128 (i) > 6)
	/* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes.  */
	return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
      else
	op = DW_OP_constu;
    }
  else
    {
      if (i >= -0x80)
	op = DW_OP_const1s;
      else if (i >= -0x8000)
	op = DW_OP_const2s;
      else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
	{
	  /* If the size-computing twin found something shorter than
	     DW_OP_const4s (5 bytes), it is (presumably — mirrors
	     size_of_int_loc_descriptor) the negated-value + DW_OP_neg
	     form; emit that.  */
	  if (size_of_int_loc_descriptor (i) < 5)
	    {
	      dw_loc_descr_ref ret = int_loc_descriptor (poly_i: -i);
	      add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_neg, oprnd1: 0, oprnd2: 0));
	      return ret;
	    }
	  op = DW_OP_const4s;
	}
      else
	{
	  /* Likewise, but comparing against the SLEB128 encoding.  */
	  if (size_of_int_loc_descriptor (i)
	      < (unsigned long) 1 + size_of_sleb128 (i))
	    {
	      dw_loc_descr_ref ret = int_loc_descriptor (poly_i: -i);
	      add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_neg, oprnd1: 0, oprnd2: 0));
	      return ret;
	    }
	  op = DW_OP_consts;
	}
    }

  return new_loc_descr (op, oprnd1: i, oprnd2: 0);
}
14629 | |
14630 | /* Likewise, for unsigned constants. */ |
14631 | |
static dw_loc_descr_ref
uint_loc_descriptor (unsigned HOST_WIDE_INT i)
{
  const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
  const unsigned HOST_WIDE_INT max_uint
    = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);

  /* If possible, use the clever signed constants handling.  */
  if (i <= max_int)
    return int_loc_descriptor (poly_i: (HOST_WIDE_INT) i);

  /* Here, we are left with positive numbers that cannot be represented as
     HOST_WIDE_INT, i.e.:
	 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)

     Using DW_OP_const4u/const8u operations to encode them consumes a lot of
     bytes whereas it may be better to output a negative integer: thanks to
     integer wrapping, we know that:
	 x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
	   = x - 2 * (max (HOST_WIDE_INT) + 1)
     So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
     small negative integers.  Let's try that in cases where it will clearly
     improve the encoding: there is no gain turning DW_OP_const4u into
     DW_OP_const4s.  */
  if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
      && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
	  || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
    {
      const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;

      /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
	 i.e. 0 <= first_shift <= max (HOST_WIDE_INT).  */
      const HOST_WIDE_INT second_shift
	= (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;

      /* So we finally have:
	 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
	 i.e. min (HOST_WIDE_INT) <= second_shift < 0.  */
      return int_loc_descriptor (poly_i: second_shift);
    }

  /* Last chance: fallback to a simple constant operation.  */
  return new_loc_descr
    (op: (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
     ? DW_OP_const4u
     : DW_OP_const8u,
     oprnd1: i, oprnd2: 0);
}
14680 | |
14681 | /* Generate and return a location description that computes the unsigned |
14682 | comparison of the two stack top entries (a OP b where b is the top-most |
14683 | entry and a is the second one). The KIND of comparison can be LT_EXPR, |
14684 | LE_EXPR, GT_EXPR or GE_EXPR. */ |
14685 | |
static dw_loc_descr_ref
uint_comparison_loc_list (enum tree_code kind)
{
  enum dwarf_location_atom op, flip_op;
  dw_loc_descr_ref ret, bra_node, jmp_node, tmp;

  /* Map the tree comparison code to its signed DWARF counterpart.  */
  switch (kind)
    {
    case LT_EXPR:
      op = DW_OP_lt;
      break;
    case LE_EXPR:
      op = DW_OP_le;
      break;
    case GT_EXPR:
      op = DW_OP_gt;
      break;
    case GE_EXPR:
      op = DW_OP_ge;
      break;
    default:
      gcc_unreachable ();
    }

  /* Forward branches; their targets are patched below, once the target
     operations have been created.  */
  bra_node = new_loc_descr (op: DW_OP_bra, oprnd1: 0, oprnd2: 0);
  jmp_node = new_loc_descr (op: DW_OP_skip, oprnd1: 0, oprnd2: 0);

  /* Until DWARFv4, operations all work on signed integers.  It is nevertheless
     possible to perform unsigned comparisons: we just have to distinguish
     two cases:

     1. when a and b have the same sign (as signed integers); then we should
	return: a OP(signed) b;

     2. when a is a negative signed integer while b is a positive one, then a
	is a greater unsigned integer than b; likewise when a and b's roles
	are flipped.

     So first, compare the sign of the two operands.  */
  ret = new_loc_descr (op: DW_OP_over, oprnd1: 0, oprnd2: 0);
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_over, oprnd1: 0, oprnd2: 0));
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_xor, oprnd1: 0, oprnd2: 0));
  /* If they have different signs (i.e. they have different sign bits), then
     the stack top value has now the sign bit set and thus it's smaller than
     zero.  */
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_lit0, oprnd1: 0, oprnd2: 0));
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_lt, oprnd1: 0, oprnd2: 0));
  add_loc_descr (list_head: &ret, descr: bra_node);

  /* We are in case 1.  At this point, we know both operands have the same
     sign, so it's safe to use the built-in signed comparison.  */
  add_loc_descr (list_head: &ret, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0));
  add_loc_descr (list_head: &ret, descr: jmp_node);

  /* We are in case 2.  Here, we know both operands do not have the same sign,
     so we have to flip the signed comparison.  */
  flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
  tmp = new_loc_descr (op: flip_op, oprnd1: 0, oprnd2: 0);
  /* Patch the conditional branch to land on the flipped comparison.  */
  bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
  bra_node->dw_loc_oprnd1.v.val_loc = tmp;
  add_loc_descr (list_head: &ret, descr: tmp);

  /* This dummy operation is necessary to make the two branches join.  */
  tmp = new_loc_descr (op: DW_OP_nop, oprnd1: 0, oprnd2: 0);
  jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
  jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
  add_loc_descr (list_head: &ret, descr: tmp);

  return ret;
}
14756 | |
14757 | /* Likewise, but takes the location description lists (might be destructive on |
14758 | them). Return NULL if either is NULL or if concatenation fails. */ |
14759 | |
14760 | static dw_loc_list_ref |
14761 | loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right, |
14762 | enum tree_code kind) |
14763 | { |
14764 | if (left == NULL || right == NULL) |
14765 | return NULL; |
14766 | |
14767 | add_loc_list (ret: &left, list: right); |
14768 | if (left == NULL) |
14769 | return NULL; |
14770 | |
14771 | add_loc_descr_to_each (list: left, ref: uint_comparison_loc_list (kind)); |
14772 | return left; |
14773 | } |
14774 | |
14775 | /* Return size_of_locs (int_shift_loc_descriptor (i, shift)) |
14776 | without actually allocating it. */ |
14777 | |
14778 | static unsigned long |
14779 | size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift) |
14780 | { |
14781 | return size_of_int_loc_descriptor (i >> shift) |
14782 | + size_of_int_loc_descriptor (shift) |
14783 | + 1; |
14784 | } |
14785 | |
/* Return size_of_locs (int_loc_descriptor (i)) without
   actually allocating it.  The branches below must mirror the encoding
   choices made by int_loc_descriptor.  */

static unsigned long
size_of_int_loc_descriptor (HOST_WIDE_INT i)
{
  unsigned long s;

  if (i >= 0)
    {
      int clz, ctz;
      /* Small literal: a single opcode byte.  */
      if (i <= 31)
	return 1;
      /* One opcode plus a 1-byte unsigned operand.  */
      else if (i <= 0xff)
	return 2;
      /* One opcode plus a 2-byte unsigned operand.  */
      else if (i <= 0xffff)
	return 3;
      clz = clz_hwi (x: i);
      ctz = ctz_hwi (x: i);
      /* A value with few significant bits may be cheaper as a narrow
	 literal shifted into place (literal + shift amount + one shift
	 opcode).  The first condition checks the significant bits fit
	 the narrow literal; the second that the shift amount itself is
	 encodable cheaply.  */
      if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
	  && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
	return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
						    - clz - 5);
      else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
	       && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
	return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
						    - clz - 8);
      /* On 32-bit address targets, a value with bit 31 set may be
	 emitted more compactly via its sign-extended 32-bit
	 truncation.  */
      else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
	       && size_of_int_loc_descriptor (i: (HOST_WIDE_INT) (int32_t) i)
		  <= 4)
	return size_of_int_loc_descriptor (i: (HOST_WIDE_INT) (int32_t) i);
      /* One opcode plus a 4-byte unsigned operand.  */
      else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
	return 5;
      /* Otherwise fall back to a ULEB128-encoded constant, unless a
	 shifted narrow constant is still shorter for this width.  */
      s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
      if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
	  && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
	return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
						    - clz - 8);
      else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
	       && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
	return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
						    - clz - 16);
      else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
	       && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
	       && s > 6)
	return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
						    - clz - 32);
      else
	return 1 + s;
    }
  else
    {
      /* One opcode plus a 1-byte signed operand.  */
      if (i >= -0x80)
	return 2;
      /* One opcode plus a 2-byte signed operand.  */
      else if (i >= -0x8000)
	return 3;
      else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
	{
	  /* Encoding -I and negating may beat the 4-byte signed
	     constant.  The guard skips the most negative value, for
	     which -I wraps back to I.  */
	  if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
	    {
	      s = size_of_int_loc_descriptor (i: -i) + 1;
	      if (s < 5)
		return s;
	    }
	  return 5;
	}
      else
	{
	  /* SLEB128-encoded constant, unless negate-and-encode is
	     shorter; same most-negative-value guard as above.  */
	  unsigned long r = 1 + size_of_sleb128 (i);
	  if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
	    {
	      s = size_of_int_loc_descriptor (i: -i) + 1;
	      if (s < r)
		return s;
	    }
	  return r;
	}
    }
}
14865 | |
14866 | /* Return loc description representing "address" of integer value. |
14867 | This can appear only as toplevel expression. */ |
14868 | |
14869 | static dw_loc_descr_ref |
14870 | address_of_int_loc_descriptor (int size, HOST_WIDE_INT i) |
14871 | { |
14872 | int litsize; |
14873 | dw_loc_descr_ref loc_result = NULL; |
14874 | |
14875 | if (!(dwarf_version >= 4 || !dwarf_strict)) |
14876 | return NULL; |
14877 | |
14878 | litsize = size_of_int_loc_descriptor (i); |
14879 | /* Determine if DW_OP_stack_value or DW_OP_implicit_value |
14880 | is more compact. For DW_OP_stack_value we need: |
14881 | litsize + 1 (DW_OP_stack_value) |
14882 | and for DW_OP_implicit_value: |
14883 | 1 (DW_OP_implicit_value) + 1 (length) + size. */ |
14884 | if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size) |
14885 | { |
14886 | loc_result = int_loc_descriptor (poly_i: i); |
14887 | add_loc_descr (list_head: &loc_result, |
14888 | descr: new_loc_descr (op: DW_OP_stack_value, oprnd1: 0, oprnd2: 0)); |
14889 | return loc_result; |
14890 | } |
14891 | |
14892 | loc_result = new_loc_descr (op: DW_OP_implicit_value, |
14893 | oprnd1: size, oprnd2: 0); |
14894 | loc_result->dw_loc_oprnd2.val_class = dw_val_class_const; |
14895 | loc_result->dw_loc_oprnd2.v.val_int = i; |
14896 | return loc_result; |
14897 | } |
14898 | |
/* Return a location descriptor that designates a base+offset location.
   REG is the (pre-elimination) base register, OFFSET the displacement
   from it, and INITIALIZED says whether the value is known to be
   initialized; if not, a DW_OP_GNU_uninit marker is appended.  */

static dw_loc_descr_ref
based_loc_descr (rtx reg, poly_int64 offset,
		 enum var_init_status initialized)
{
  unsigned int regno;
  dw_loc_descr_ref result;
  dw_fde_ref fde = cfun->fde;

  /* We only use "frame base" when we're sure we're talking about the
     post-prologue local stack frame.  We do this by *not* running
     register elimination until this point, and recognizing the special
     argument pointer and soft frame pointer rtx's.  */
  if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
    {
      /* Perform register elimination now to find the real base.  */
      rtx elim = (ira_use_lra_p
		  ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
		  : eliminate_regs (reg, VOIDmode, NULL_RTX));

      if (elim != reg)
	{
	  /* Allow hard frame pointer here even if frame pointer
	     isn't used since hard frame pointer is encoded with
	     DW_OP_fbreg which uses the DW_AT_frame_base attribute,
	     not hard frame pointer directly.  */
	  elim = strip_offset_and_add (x: elim, offset: &offset);
	  gcc_assert (elim == hard_frame_pointer_rtx
		      || elim == stack_pointer_rtx);

	  /* If drap register is used to align stack, use frame
	     pointer + offset to access stack variables.  If stack
	     is aligned without drap, use stack pointer + offset to
	     access stack variables.  */
	  if (crtl->stack_realign_tried
	      && reg == frame_pointer_rtx)
	    {
	      int base_reg
		= DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
				      ? HARD_FRAME_POINTER_REGNUM
				      : REGNO (elim));
	      return new_reg_loc_descr (reg: base_reg, offset);
	    }

	  /* Frame-base relative: DW_OP_fbreg with a constant operand
	     when the offset is constant, otherwise add the poly-int
	     offset as explicit operations.  */
	  gcc_assert (frame_pointer_fb_offset_valid);
	  offset += frame_pointer_fb_offset;
	  HOST_WIDE_INT const_offset;
	  if (offset.is_constant (const_value: &const_offset))
	    return new_loc_descr (op: DW_OP_fbreg, oprnd1: const_offset, oprnd2: 0);
	  else
	    {
	      dw_loc_descr_ref ret = new_loc_descr (op: DW_OP_fbreg, oprnd1: 0, oprnd2: 0);
	      loc_descr_plus_const (list_head: &ret, poly_offset: offset);
	      return ret;
	    }
	}
    }

  regno = REGNO (reg);
#ifdef LEAF_REG_REMAP
  /* Targets that renumber registers in leaf functions need the remapped
     hardware register number here.  */
  if (crtl->uses_only_leaf_regs)
    {
      int leaf_reg = LEAF_REG_REMAP (regno);
      if (leaf_reg != -1)
	regno = (unsigned) leaf_reg;
    }
#endif
  regno = DWARF_FRAME_REGNUM (regno);

  HOST_WIDE_INT const_offset;
  if (!optimize && fde
      && (fde->drap_reg == regno || fde->vdrap_reg == regno)
      && offset.is_constant (const_value: &const_offset))
    {
      /* Use cfa+offset to represent the location of arguments passed
	 on the stack when drap is used to align stack.
	 Only do this when not optimizing, for optimized code var-tracking
	 is supposed to track where the arguments live and the register
	 used as vdrap or drap in some spot might be used for something
	 else in other part of the routine.  */
      return new_loc_descr (op: DW_OP_fbreg, oprnd1: const_offset, oprnd2: 0);
    }

  /* Plain register + offset.  */
  result = new_reg_loc_descr (reg: regno, offset);

  if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
    add_loc_descr (list_head: &result, descr: new_loc_descr (op: DW_OP_GNU_uninit, oprnd1: 0, oprnd2: 0));

  return result;
}
14989 | |
14990 | /* Return true if this RTL expression describes a base+offset calculation. */ |
14991 | |
14992 | static inline bool |
14993 | is_based_loc (const_rtx rtl) |
14994 | { |
14995 | return (GET_CODE (rtl) == PLUS |
14996 | && ((REG_P (XEXP (rtl, 0)) |
14997 | && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER |
14998 | && CONST_INT_P (XEXP (rtl, 1))))); |
14999 | } |
15000 | |
15001 | /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0) |
15002 | failed. */ |
15003 | |
15004 | static dw_loc_descr_ref |
15005 | tls_mem_loc_descriptor (rtx mem) |
15006 | { |
15007 | tree base; |
15008 | dw_loc_descr_ref loc_result; |
15009 | |
15010 | if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem)) |
15011 | return NULL; |
15012 | |
15013 | base = get_base_address (MEM_EXPR (mem)); |
15014 | if (base == NULL |
15015 | || !VAR_P (base) |
15016 | || !DECL_THREAD_LOCAL_P (base)) |
15017 | return NULL; |
15018 | |
15019 | loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL); |
15020 | if (loc_result == NULL) |
15021 | return NULL; |
15022 | |
15023 | if (maybe_ne (MEM_OFFSET (mem), b: 0)) |
15024 | loc_descr_plus_const (list_head: &loc_result, MEM_OFFSET (mem)); |
15025 | |
15026 | return loc_result; |
15027 | } |
15028 | |
15029 | /* Output debug info about reason why we failed to expand expression as dwarf |
15030 | expression. */ |
15031 | |
15032 | static void |
15033 | expansion_failed (tree expr, rtx rtl, char const *reason) |
15034 | { |
15035 | if (dump_file && (dump_flags & TDF_DETAILS)) |
15036 | { |
15037 | fprintf (stream: dump_file, format: "Failed to expand as dwarf: " ); |
15038 | if (expr) |
15039 | print_generic_expr (dump_file, expr, dump_flags); |
15040 | if (rtl) |
15041 | { |
15042 | fprintf (stream: dump_file, format: "\n" ); |
15043 | print_rtl (dump_file, rtl); |
15044 | } |
15045 | fprintf (stream: dump_file, format: "\nReason: %s\n" , reason); |
15046 | } |
15047 | } |
15048 | |
/* Helper function for const_ok_for_output.  RTL is a SYMBOL_REF or a
   sub-expression of a CONST; return true if it may be referenced from
   debug info.  */

static bool
const_ok_for_output_1 (rtx rtl)
{
  /* Let the target veto first.  */
  if (targetm.const_not_ok_for_debug_p (rtl))
    {
      if (GET_CODE (rtl) != UNSPEC)
	{
	  expansion_failed (NULL_TREE, rtl,
			    reason: "Expression rejected for debug by the backend.\n" );
	  return false;
	}

      /* If delegitimize_address couldn't do anything with the UNSPEC, and
	 the target hook doesn't explicitly allow it in debug info, assume
	 we can't express it in the debug info.  */
      /* Don't complain about TLS UNSPECs, those are just too hard to
	 delegitimize.  Note this could be a non-decl SYMBOL_REF such as
	 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
	 rather than DECL_THREAD_LOCAL_P is not just an optimization.  */
      if (flag_checking
	  && (XVECLEN (rtl, 0) == 0
	      || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
	      || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
	inform (current_function_decl
		? DECL_SOURCE_LOCATION (current_function_decl)
		: UNKNOWN_LOCATION,
#if NUM_UNSPEC_VALUES > 0
		"non-delegitimized UNSPEC %s (%d) found in variable location" ,
		((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
		 ? unspec_strings[XINT (rtl, 1)] : "unknown" ),
#else
		"non-delegitimized UNSPEC %d found in variable location" ,
#endif
		XINT (rtl, 1));
      expansion_failed (NULL_TREE, rtl,
			reason: "UNSPEC hasn't been delegitimized.\n" );
      return false;
    }

  /* Poly-int constants have no single fixed value to emit.  */
  if (CONST_POLY_INT_P (rtl))
    return false;

  /* FIXME: Refer to PR60655. It is possible for simplification
     of rtl expressions in var tracking to produce such expressions.
     We should really identify / validate expressions
     enclosed in CONST that can be handled by assemblers on various
     targets and only handle legitimate cases here.  */
  switch (GET_CODE (rtl))
    {
    case SYMBOL_REF:
      break;
    case NOT:
    case NEG:
      return false;
    case PLUS:
      {
	/* Make sure SYMBOL_REFs/UNSPECs are at most in one of the
	   operands.  FIRST records whether operand 0 contains one.  */
	subrtx_var_iterator::array_type array;
	bool first = false;
	FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
	  if (SYMBOL_REF_P (*iter)
	      || LABEL_P (*iter)
	      || GET_CODE (*iter) == UNSPEC)
	    {
	      first = true;
	      break;
	    }
	if (!first)
	  return true;
	FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
	  if (SYMBOL_REF_P (*iter)
	      || LABEL_P (*iter)
	      || GET_CODE (*iter) == UNSPEC)
	    return false;
	return true;
      }
    case MINUS:
      {
	/* Disallow negation of SYMBOL_REFs or UNSPECs when they
	   appear in the second operand of MINUS.  */
	subrtx_var_iterator::array_type array;
	FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
	  if (SYMBOL_REF_P (*iter)
	      || LABEL_P (*iter)
	      || GET_CODE (*iter) == UNSPEC)
	    return false;
	return true;
      }
    default:
      return true;
    }

  /* From here on RTL is known to be a SYMBOL_REF (the only case that
     falls through the switch above).  */
  if (CONSTANT_POOL_ADDRESS_P (rtl))
    {
      bool marked;
      get_pool_constant_mark (rtl, &marked);
      /* If all references to this pool constant were optimized away,
	 it was not output and thus we can't represent it.  */
      if (!marked)
	{
	  expansion_failed (NULL_TREE, rtl,
			    reason: "Constant was removed from constant pool.\n" );
	  return false;
	}
    }

  /* A static address relocation can't express the dynamic address
     computation TLS symbols need.  */
  if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
    return false;

  /* Avoid references to external symbols in debug info, on several targets
     the linker might even refuse to link when linking a shared library,
     and in many other cases the relocations for .debug_info/.debug_loc are
     dropped, so the address becomes zero anyway.  Hidden symbols, guaranteed
     to be defined within the same shared library or executable are fine.  */
  if (SYMBOL_REF_EXTERNAL_P (rtl))
    {
      tree decl = SYMBOL_REF_DECL (rtl);

      if (decl == NULL || !targetm.binds_local_p (decl))
	{
	  expansion_failed (NULL_TREE, rtl,
			    reason: "Symbol not defined in current TU.\n" );
	  return false;
	}
    }

  return true;
}
15180 | |
15181 | /* Return true if constant RTL can be emitted in DW_OP_addr or |
15182 | DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or |
15183 | non-marked constant pool SYMBOL_REFs can't be referenced in it. */ |
15184 | |
15185 | static bool |
15186 | const_ok_for_output (rtx rtl) |
15187 | { |
15188 | if (GET_CODE (rtl) == SYMBOL_REF) |
15189 | return const_ok_for_output_1 (rtl); |
15190 | |
15191 | if (GET_CODE (rtl) == CONST) |
15192 | { |
15193 | subrtx_var_iterator::array_type array; |
15194 | FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL) |
15195 | if (!const_ok_for_output_1 (rtl: *iter)) |
15196 | return false; |
15197 | return true; |
15198 | } |
15199 | |
15200 | return true; |
15201 | } |
15202 | |
15203 | /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP |
15204 | if possible, NULL otherwise. */ |
15205 | |
15206 | static dw_die_ref |
15207 | base_type_for_mode (machine_mode mode, bool unsignedp) |
15208 | { |
15209 | dw_die_ref type_die; |
15210 | tree type = lang_hooks.types.type_for_mode (mode, unsignedp); |
15211 | |
15212 | if (type == NULL) |
15213 | return NULL; |
15214 | switch (TREE_CODE (type)) |
15215 | { |
15216 | case INTEGER_TYPE: |
15217 | case REAL_TYPE: |
15218 | break; |
15219 | default: |
15220 | return NULL; |
15221 | } |
15222 | type_die = lookup_type_die (type); |
15223 | if (!type_die) |
15224 | type_die = modified_type_die (type, cv_quals: TYPE_UNQUALIFIED, reverse: false, |
15225 | context_die: comp_unit_die ()); |
15226 | if (type_die == NULL || type_die->die_tag != DW_TAG_base_type) |
15227 | return NULL; |
15228 | return type_die; |
15229 | } |
15230 | |
15231 | /* For OP descriptor assumed to be in unsigned MODE, convert it to a unsigned |
15232 | type matching MODE, or, if MODE is narrower than or as wide as |
15233 | DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not |
15234 | possible. */ |
15235 | |
15236 | static dw_loc_descr_ref |
15237 | convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op) |
15238 | { |
15239 | machine_mode outer_mode = mode; |
15240 | dw_die_ref type_die; |
15241 | dw_loc_descr_ref cvt; |
15242 | |
15243 | if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE) |
15244 | { |
15245 | add_loc_descr (list_head: &op, descr: new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0)); |
15246 | return op; |
15247 | } |
15248 | type_die = base_type_for_mode (mode: outer_mode, unsignedp: 1); |
15249 | if (type_die == NULL) |
15250 | return NULL; |
15251 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
15252 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
15253 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
15254 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
15255 | add_loc_descr (list_head: &op, descr: cvt); |
15256 | return op; |
15257 | } |
15258 | |
15259 | /* Return location descriptor for comparison OP with operands OP0 and OP1. */ |
15260 | |
15261 | static dw_loc_descr_ref |
15262 | compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0, |
15263 | dw_loc_descr_ref op1) |
15264 | { |
15265 | dw_loc_descr_ref ret = op0; |
15266 | add_loc_descr (list_head: &ret, descr: op1); |
15267 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
15268 | if (STORE_FLAG_VALUE != 1) |
15269 | { |
15270 | add_loc_descr (list_head: &ret, descr: int_loc_descriptor (STORE_FLAG_VALUE)); |
15271 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_mul, oprnd1: 0, oprnd2: 0)); |
15272 | } |
15273 | return ret; |
15274 | } |
15275 | |
15276 | /* Subroutine of scompare_loc_descriptor for the case in which we're |
15277 | comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE, |
15278 | and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */ |
15279 | |
15280 | static dw_loc_descr_ref |
15281 | scompare_loc_descriptor_wide (enum dwarf_location_atom op, |
15282 | scalar_int_mode op_mode, |
15283 | dw_loc_descr_ref op0, dw_loc_descr_ref op1) |
15284 | { |
15285 | dw_die_ref type_die = base_type_for_mode (mode: op_mode, unsignedp: 0); |
15286 | dw_loc_descr_ref cvt; |
15287 | |
15288 | if (type_die == NULL) |
15289 | return NULL; |
15290 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
15291 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
15292 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
15293 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
15294 | add_loc_descr (list_head: &op0, descr: cvt); |
15295 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
15296 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
15297 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
15298 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
15299 | add_loc_descr (list_head: &op1, descr: cvt); |
15300 | return compare_loc_descriptor (op, op0, op1); |
15301 | } |
15302 | |
/* Subroutine of scompare_loc_descriptor for the case in which we're
   comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
   and in which OP_MODE is smaller than DWARF2_ADDR_SIZE.  The general
   strategy is to shift both operands into the most significant bits of an
   address-sized word so the signed comparison gives the right answer.  */

static dw_loc_descr_ref
scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
				scalar_int_mode op_mode,
				dw_loc_descr_ref op0, dw_loc_descr_ref op1)
{
  int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (mode: op_mode)) * BITS_PER_UNIT;
  /* For eq/ne, if the operands are known to be zero-extended,
     there is no need to do the fancy shifting up.  */
  if (op == DW_OP_eq || op == DW_OP_ne)
    {
      dw_loc_descr_ref last0, last1;
      /* Walk to the final operation of each operand's expansion.  */
      for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
	;
      for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
	;
      /* deref_size zero extends, and for constants we can check
	 whether they are zero extended or not.  */
      if (((last0->dw_loc_opc == DW_OP_deref_size
	    && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (mode: op_mode))
	   || (CONST_INT_P (XEXP (rtl, 0))
	       && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
		  == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
	  && ((last1->dw_loc_opc == DW_OP_deref_size
	       && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (mode: op_mode))
	      || (CONST_INT_P (XEXP (rtl, 1))
		  && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
		     == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
	return compare_loc_descriptor (op, op0, op1);

      /* EQ/NE comparison against constant in narrower type than
	 DWARF2_ADDR_SIZE can be performed either as
	 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
	 DW_OP_{eq,ne}
	 or
	 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
	 DW_OP_{eq,ne}.  Pick whatever is shorter.  */
      if (CONST_INT_P (XEXP (rtl, 1))
	  && GET_MODE_BITSIZE (mode: op_mode) < HOST_BITS_PER_WIDE_INT
	  && (size_of_int_loc_descriptor (i: shift) + 1
	      + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
	      >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
	      + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
					    & GET_MODE_MASK (op_mode))))
	{
	  /* The mask-and-compare form is no larger: emit it.  */
	  add_loc_descr (list_head: &op0, descr: int_loc_descriptor (GET_MODE_MASK (op_mode)));
	  add_loc_descr (list_head: &op0, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0));
	  op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
				    & GET_MODE_MASK (op_mode));
	  return compare_loc_descriptor (op, op0, op1);
	}
    }
  /* Shift both operands up into the high bits; a constant second
     operand is folded at compile time instead of shifted at runtime.  */
  add_loc_descr (list_head: &op0, descr: int_loc_descriptor (poly_i: shift));
  add_loc_descr (list_head: &op0, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0));
  if (CONST_INT_P (XEXP (rtl, 1)))
    op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
  else
    {
      add_loc_descr (list_head: &op1, descr: int_loc_descriptor (poly_i: shift));
      add_loc_descr (list_head: &op1, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0));
    }
  return compare_loc_descriptor (op, op0, op1);
}
15369 | |
15370 | /* Return location descriptor for signed comparison OP RTL. */ |
15371 | |
15372 | static dw_loc_descr_ref |
15373 | scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl, |
15374 | machine_mode mem_mode) |
15375 | { |
15376 | machine_mode op_mode = GET_MODE (XEXP (rtl, 0)); |
15377 | dw_loc_descr_ref op0, op1; |
15378 | |
15379 | if (op_mode == VOIDmode) |
15380 | op_mode = GET_MODE (XEXP (rtl, 1)); |
15381 | if (op_mode == VOIDmode) |
15382 | return NULL; |
15383 | |
15384 | scalar_int_mode int_op_mode; |
15385 | if (dwarf_strict |
15386 | && dwarf_version < 5 |
15387 | && (!is_a <scalar_int_mode> (m: op_mode, result: &int_op_mode) |
15388 | || GET_MODE_SIZE (mode: int_op_mode) > DWARF2_ADDR_SIZE)) |
15389 | return NULL; |
15390 | |
15391 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode: op_mode, mem_mode, |
15392 | VAR_INIT_STATUS_INITIALIZED); |
15393 | op1 = mem_loc_descriptor (XEXP (rtl, 1), mode: op_mode, mem_mode, |
15394 | VAR_INIT_STATUS_INITIALIZED); |
15395 | |
15396 | if (op0 == NULL || op1 == NULL) |
15397 | return NULL; |
15398 | |
15399 | if (is_a <scalar_int_mode> (m: op_mode, result: &int_op_mode)) |
15400 | { |
15401 | if (GET_MODE_SIZE (mode: int_op_mode) < DWARF2_ADDR_SIZE) |
15402 | return scompare_loc_descriptor_narrow (op, rtl, op_mode: int_op_mode, op0, op1); |
15403 | |
15404 | if (GET_MODE_SIZE (mode: int_op_mode) > DWARF2_ADDR_SIZE) |
15405 | return scompare_loc_descriptor_wide (op, op_mode: int_op_mode, op0, op1); |
15406 | } |
15407 | return compare_loc_descriptor (op, op0, op1); |
15408 | } |
15409 | |
/* Return location descriptor for unsigned comparison OP RTL.  */

static dw_loc_descr_ref
ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
			 machine_mode mem_mode)
{
  dw_loc_descr_ref op0, op1;

  /* Infer the operand mode; only scalar integer modes are handled.  */
  machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
  if (test_op_mode == VOIDmode)
    test_op_mode = GET_MODE (XEXP (rtl, 1));

  scalar_int_mode op_mode;
  if (!is_a <scalar_int_mode> (m: test_op_mode, result: &op_mode))
    return NULL;

  /* Strict pre-DWARF5 has no typed stack; wider-than-address operands
     cannot be expressed.  */
  if (dwarf_strict
      && dwarf_version < 5
      && GET_MODE_SIZE (mode: op_mode) > DWARF2_ADDR_SIZE)
    return NULL;

  op0 = mem_loc_descriptor (XEXP (rtl, 0), mode: op_mode, mem_mode,
			    VAR_INIT_STATUS_INITIALIZED);
  op1 = mem_loc_descriptor (XEXP (rtl, 1), mode: op_mode, mem_mode,
			    VAR_INIT_STATUS_INITIALIZED);

  if (op0 == NULL || op1 == NULL)
    return NULL;

  if (GET_MODE_SIZE (mode: op_mode) < DWARF2_ADDR_SIZE)
    {
      /* Narrow operands: mask each down to the mode's width so the
	 comparison sees zero-extended values.  */
      HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
      dw_loc_descr_ref last0, last1;
      /* Walk to the final operation of each operand's expansion.  */
      for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
	;
      for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
	;
      if (CONST_INT_P (XEXP (rtl, 0)))
	op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
      /* deref_size zero extends, so no need to mask it again.  */
      else if (last0->dw_loc_opc != DW_OP_deref_size
	       || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (mode: op_mode))
	{
	  add_loc_descr (list_head: &op0, descr: int_loc_descriptor (poly_i: mask));
	  add_loc_descr (list_head: &op0, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0));
	}
      if (CONST_INT_P (XEXP (rtl, 1)))
	op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
      /* deref_size zero extends, so no need to mask it again.  */
      else if (last1->dw_loc_opc != DW_OP_deref_size
	       || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (mode: op_mode))
	{
	  add_loc_descr (list_head: &op1, descr: int_loc_descriptor (poly_i: mask));
	  add_loc_descr (list_head: &op1, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0));
	}
    }
  else if (GET_MODE_SIZE (mode: op_mode) == DWARF2_ADDR_SIZE)
    {
      /* Address-sized operands: DWARF comparisons are signed, so bias
	 both sides by the sign bit (2^(N-1)); the signed order of the
	 biased values then matches the unsigned order of the
	 originals.  A constant second operand gets the bias folded in
	 at compile time.  */
      HOST_WIDE_INT bias = 1;
      bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
      add_loc_descr (list_head: &op0, descr: new_loc_descr (op: DW_OP_plus_uconst, oprnd1: bias, oprnd2: 0));
      if (CONST_INT_P (XEXP (rtl, 1)))
	op1 = int_loc_descriptor (poly_i: (unsigned HOST_WIDE_INT) bias
				  + INTVAL (XEXP (rtl, 1)));
      else
	add_loc_descr (list_head: &op1, descr: new_loc_descr (op: DW_OP_plus_uconst,
					oprnd1: bias, oprnd2: 0));
    }
  return compare_loc_descriptor (op, op0, op1);
}
15480 | |
15481 | /* Return location descriptor for {U,S}{MIN,MAX}. */ |
15482 | |
15483 | static dw_loc_descr_ref |
15484 | minmax_loc_descriptor (rtx rtl, machine_mode mode, |
15485 | machine_mode mem_mode) |
15486 | { |
15487 | enum dwarf_location_atom op; |
15488 | dw_loc_descr_ref op0, op1, ret; |
15489 | dw_loc_descr_ref bra_node, drop_node; |
15490 | |
15491 | scalar_int_mode int_mode; |
15492 | if (dwarf_strict |
15493 | && dwarf_version < 5 |
15494 | && (!is_a <scalar_int_mode> (m: mode, result: &int_mode) |
15495 | || GET_MODE_SIZE (mode: int_mode) > DWARF2_ADDR_SIZE)) |
15496 | return NULL; |
15497 | |
15498 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, |
15499 | VAR_INIT_STATUS_INITIALIZED); |
15500 | op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, |
15501 | VAR_INIT_STATUS_INITIALIZED); |
15502 | |
15503 | if (op0 == NULL || op1 == NULL) |
15504 | return NULL; |
15505 | |
15506 | add_loc_descr (list_head: &op0, descr: new_loc_descr (op: DW_OP_dup, oprnd1: 0, oprnd2: 0)); |
15507 | add_loc_descr (list_head: &op1, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0)); |
15508 | add_loc_descr (list_head: &op1, descr: new_loc_descr (op: DW_OP_over, oprnd1: 0, oprnd2: 0)); |
15509 | if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX) |
15510 | { |
15511 | /* Checked by the caller. */ |
15512 | int_mode = as_a <scalar_int_mode> (m: mode); |
15513 | if (GET_MODE_SIZE (mode: int_mode) < DWARF2_ADDR_SIZE) |
15514 | { |
15515 | HOST_WIDE_INT mask = GET_MODE_MASK (int_mode); |
15516 | add_loc_descr (list_head: &op0, descr: int_loc_descriptor (poly_i: mask)); |
15517 | add_loc_descr (list_head: &op0, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0)); |
15518 | add_loc_descr (list_head: &op1, descr: int_loc_descriptor (poly_i: mask)); |
15519 | add_loc_descr (list_head: &op1, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0)); |
15520 | } |
15521 | else if (GET_MODE_SIZE (mode: int_mode) == DWARF2_ADDR_SIZE) |
15522 | { |
15523 | HOST_WIDE_INT bias = 1; |
15524 | bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1); |
15525 | add_loc_descr (list_head: &op0, descr: new_loc_descr (op: DW_OP_plus_uconst, oprnd1: bias, oprnd2: 0)); |
15526 | add_loc_descr (list_head: &op1, descr: new_loc_descr (op: DW_OP_plus_uconst, oprnd1: bias, oprnd2: 0)); |
15527 | } |
15528 | } |
15529 | else if (is_a <scalar_int_mode> (m: mode, result: &int_mode) |
15530 | && GET_MODE_SIZE (mode: int_mode) < DWARF2_ADDR_SIZE) |
15531 | { |
15532 | int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (mode: int_mode)) * BITS_PER_UNIT; |
15533 | add_loc_descr (list_head: &op0, descr: int_loc_descriptor (poly_i: shift)); |
15534 | add_loc_descr (list_head: &op0, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0)); |
15535 | add_loc_descr (list_head: &op1, descr: int_loc_descriptor (poly_i: shift)); |
15536 | add_loc_descr (list_head: &op1, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0)); |
15537 | } |
15538 | else if (is_a <scalar_int_mode> (m: mode, result: &int_mode) |
15539 | && GET_MODE_SIZE (mode: int_mode) > DWARF2_ADDR_SIZE) |
15540 | { |
15541 | dw_die_ref type_die = base_type_for_mode (mode: int_mode, unsignedp: 0); |
15542 | dw_loc_descr_ref cvt; |
15543 | if (type_die == NULL) |
15544 | return NULL; |
15545 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
15546 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
15547 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
15548 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
15549 | add_loc_descr (list_head: &op0, descr: cvt); |
15550 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
15551 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
15552 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
15553 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
15554 | add_loc_descr (list_head: &op1, descr: cvt); |
15555 | } |
15556 | |
15557 | if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN) |
15558 | op = DW_OP_lt; |
15559 | else |
15560 | op = DW_OP_gt; |
15561 | ret = op0; |
15562 | add_loc_descr (list_head: &ret, descr: op1); |
15563 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
15564 | bra_node = new_loc_descr (op: DW_OP_bra, oprnd1: 0, oprnd2: 0); |
15565 | add_loc_descr (list_head: &ret, descr: bra_node); |
15566 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0)); |
15567 | drop_node = new_loc_descr (op: DW_OP_drop, oprnd1: 0, oprnd2: 0); |
15568 | add_loc_descr (list_head: &ret, descr: drop_node); |
15569 | bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc; |
15570 | bra_node->dw_loc_oprnd1.v.val_loc = drop_node; |
15571 | if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX) |
15572 | && is_a <scalar_int_mode> (m: mode, result: &int_mode) |
15573 | && GET_MODE_SIZE (mode: int_mode) > DWARF2_ADDR_SIZE) |
15574 | ret = convert_descriptor_to_mode (mode: int_mode, op: ret); |
15575 | return ret; |
15576 | } |
15577 | |
15578 | /* Helper function for mem_loc_descriptor. Perform OP binary op, |
15579 | but after converting arguments to type_die, afterwards |
15580 | convert back to unsigned. */ |
15581 | |
15582 | static dw_loc_descr_ref |
15583 | typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die, |
15584 | scalar_int_mode mode, machine_mode mem_mode) |
15585 | { |
15586 | dw_loc_descr_ref cvt, op0, op1; |
15587 | |
15588 | if (type_die == NULL) |
15589 | return NULL; |
15590 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, |
15591 | VAR_INIT_STATUS_INITIALIZED); |
15592 | op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, |
15593 | VAR_INIT_STATUS_INITIALIZED); |
15594 | if (op0 == NULL || op1 == NULL) |
15595 | return NULL; |
15596 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
15597 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
15598 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
15599 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
15600 | add_loc_descr (list_head: &op0, descr: cvt); |
15601 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
15602 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
15603 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
15604 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
15605 | add_loc_descr (list_head: &op1, descr: cvt); |
15606 | add_loc_descr (list_head: &op0, descr: op1); |
15607 | add_loc_descr (list_head: &op0, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
15608 | return convert_descriptor_to_mode (mode, op: op0); |
15609 | } |
15610 | |
15611 | /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value, |
15612 | const0 is DW_OP_lit0 or corresponding typed constant, |
15613 | const1 is DW_OP_lit1 or corresponding typed constant |
15614 | and constMSB is constant with just the MSB bit set |
15615 | for the mode): |
15616 | DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4> |
15617 | L1: const0 DW_OP_swap |
15618 | L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl |
15619 | DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2> |
15620 | L3: DW_OP_drop |
15621 | L4: DW_OP_nop |
15622 | |
15623 | CTZ is similar: |
15624 | DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4> |
15625 | L1: const0 DW_OP_swap |
15626 | L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr |
15627 | DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2> |
15628 | L3: DW_OP_drop |
15629 | L4: DW_OP_nop |
15630 | |
15631 | FFS is similar: |
15632 | DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4> |
15633 | L1: const1 DW_OP_swap |
15634 | L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr |
15635 | DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2> |
15636 | L3: DW_OP_drop |
15637 | L4: DW_OP_nop */ |
15638 | |
/* Build a DWARF expression for CLZ, CTZ or FFS of XEXP (RTL, 0),
   following the op-by-op templates in the comment above: test for a
   zero input up front, then loop shifting the value while counting
   iterations until the probed bit (MSB for CLZ, LSB otherwise) is set.
   Returns NULL if the operand cannot be described.  */

static dw_loc_descr_ref
clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
		    machine_mode mem_mode)
{
  dw_loc_descr_ref op0, ret, tmp;
  HOST_WIDE_INT valv;
  dw_loc_descr_ref l1jump, l1label;
  dw_loc_descr_ref l2jump, l2label;
  dw_loc_descr_ref l3jump, l3label;
  dw_loc_descr_ref l4jump, l4label;
  rtx msb;

  if (GET_MODE (XEXP (rtl, 0)) != mode)
    return NULL;

  op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
			    VAR_INIT_STATUS_INITIALIZED);
  if (op0 == NULL)
    return NULL;
  ret = op0;
  /* VALV is the result for a zero input: the target-defined value if
     there is one, the mode bitsize for CLZ/CTZ otherwise, and 0 for
     FFS by definition.  */
  if (GET_CODE (rtl) == CLZ)
    {
      if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
	valv = GET_MODE_BITSIZE (mode);
    }
  else if (GET_CODE (rtl) == FFS)
    valv = 0;
  else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
    valv = GET_MODE_BITSIZE (mode);
  /* DW_OP_dup DW_OP_bra <L1>: branch to the loop if the input is
     non-zero, otherwise drop it, push VALV and skip to the end.  */
  add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
  l1jump = new_loc_descr (DW_OP_bra, 0, 0);
  add_loc_descr (&ret, l1jump);
  add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
  tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
			    VAR_INIT_STATUS_INITIALIZED);
  if (tmp == NULL)
    return NULL;
  add_loc_descr (&ret, tmp);
  l4jump = new_loc_descr (DW_OP_skip, 0, 0);
  add_loc_descr (&ret, l4jump);
  /* L1: push the initial counter (1 for FFS, 0 for CLZ/CTZ) below the
     value being scanned.  */
  l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
				? const1_rtx : const0_rtx,
				mode, mem_mode,
				VAR_INIT_STATUS_INITIALIZED);
  if (l1label == NULL)
    return NULL;
  add_loc_descr (&ret, l1label);
  add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
  /* L2: loop head — probe the MSB (CLZ) or LSB (CTZ/FFS).  */
  l2label = new_loc_descr (DW_OP_dup, 0, 0);
  add_loc_descr (&ret, l2label);
  if (GET_CODE (rtl) != CLZ)
    msb = const1_rtx;
  else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    msb = GEN_INT (HOST_WIDE_INT_1U
		   << (GET_MODE_BITSIZE (mode) - 1));
  else
    msb = immed_wide_int_const
      (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
			    GET_MODE_PRECISION (mode)), mode);
  /* A negative CONST_INT MSB must be emitted as an explicit unsigned
     constant so it isn't sign-extended on the DWARF stack.  */
  if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
    tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
			 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
			 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
  else
    tmp = mem_loc_descriptor (msb, mode, mem_mode,
			      VAR_INIT_STATUS_INITIALIZED);
  if (tmp == NULL)
    return NULL;
  add_loc_descr (&ret, tmp);
  add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
  /* Branch out once the probed bit is set; otherwise shift the value
     one bit (left for CLZ, right otherwise), bump the counter and loop.  */
  l3jump = new_loc_descr (DW_OP_bra, 0, 0);
  add_loc_descr (&ret, l3jump);
  tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
			    VAR_INIT_STATUS_INITIALIZED);
  if (tmp == NULL)
    return NULL;
  add_loc_descr (&ret, tmp);
  add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
				      ? DW_OP_shl : DW_OP_shr, 0, 0));
  add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
  add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
  add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
  l2jump = new_loc_descr (DW_OP_skip, 0, 0);
  add_loc_descr (&ret, l2jump);
  /* L3: drop the shifted value, leaving the counter.  L4: join point.  */
  l3label = new_loc_descr (DW_OP_drop, 0, 0);
  add_loc_descr (&ret, l3label);
  l4label = new_loc_descr (DW_OP_nop, 0, 0);
  add_loc_descr (&ret, l4label);
  /* Resolve the forward/backward branches now that all labels exist.  */
  l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
  l1jump->dw_loc_oprnd1.v.val_loc = l1label;
  l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
  l2jump->dw_loc_oprnd1.v.val_loc = l2label;
  l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
  l3jump->dw_loc_oprnd1.v.val_loc = l3label;
  l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
  l4jump->dw_loc_oprnd1.v.val_loc = l4label;
  return ret;
}
15737 | |
15738 | /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant, |
15739 | const1 is DW_OP_lit1 or corresponding typed constant): |
15740 | const0 DW_OP_swap |
15741 | L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and |
15742 | DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1> |
15743 | L2: DW_OP_drop |
15744 | |
15745 | PARITY is similar: |
15746 | L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and |
15747 | DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1> |
15748 | L2: DW_OP_drop */ |
15749 | |
15750 | static dw_loc_descr_ref |
15751 | popcount_loc_descriptor (rtx rtl, scalar_int_mode mode, |
15752 | machine_mode mem_mode) |
15753 | { |
15754 | dw_loc_descr_ref op0, ret, tmp; |
15755 | dw_loc_descr_ref l1jump, l1label; |
15756 | dw_loc_descr_ref l2jump, l2label; |
15757 | |
15758 | if (GET_MODE (XEXP (rtl, 0)) != mode) |
15759 | return NULL; |
15760 | |
15761 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, |
15762 | VAR_INIT_STATUS_INITIALIZED); |
15763 | if (op0 == NULL) |
15764 | return NULL; |
15765 | ret = op0; |
15766 | tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode, |
15767 | VAR_INIT_STATUS_INITIALIZED); |
15768 | if (tmp == NULL) |
15769 | return NULL; |
15770 | add_loc_descr (list_head: &ret, descr: tmp); |
15771 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0)); |
15772 | l1label = new_loc_descr (op: DW_OP_dup, oprnd1: 0, oprnd2: 0); |
15773 | add_loc_descr (list_head: &ret, descr: l1label); |
15774 | l2jump = new_loc_descr (op: DW_OP_bra, oprnd1: 0, oprnd2: 0); |
15775 | add_loc_descr (list_head: &ret, descr: l2jump); |
15776 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_dup, oprnd1: 0, oprnd2: 0)); |
15777 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_rot, oprnd1: 0, oprnd2: 0)); |
15778 | tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode, |
15779 | VAR_INIT_STATUS_INITIALIZED); |
15780 | if (tmp == NULL) |
15781 | return NULL; |
15782 | add_loc_descr (list_head: &ret, descr: tmp); |
15783 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0)); |
15784 | add_loc_descr (list_head: &ret, descr: new_loc_descr (GET_CODE (rtl) == POPCOUNT |
15785 | ? DW_OP_plus : DW_OP_xor, oprnd1: 0, oprnd2: 0)); |
15786 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0)); |
15787 | tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode, |
15788 | VAR_INIT_STATUS_INITIALIZED); |
15789 | add_loc_descr (list_head: &ret, descr: tmp); |
15790 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_shr, oprnd1: 0, oprnd2: 0)); |
15791 | l1jump = new_loc_descr (op: DW_OP_skip, oprnd1: 0, oprnd2: 0); |
15792 | add_loc_descr (list_head: &ret, descr: l1jump); |
15793 | l2label = new_loc_descr (op: DW_OP_drop, oprnd1: 0, oprnd2: 0); |
15794 | add_loc_descr (list_head: &ret, descr: l2label); |
15795 | l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc; |
15796 | l1jump->dw_loc_oprnd1.v.val_loc = l1label; |
15797 | l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc; |
15798 | l2jump->dw_loc_oprnd1.v.val_loc = l2label; |
15799 | return ret; |
15800 | } |
15801 | |
15802 | /* BSWAP (constS is initial shift count, either 56 or 24): |
15803 | constS const0 |
15804 | L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr |
15805 | const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or |
15806 | DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8 |
15807 | DW_OP_minus DW_OP_swap DW_OP_skip <L1> |
15808 | L2: DW_OP_drop DW_OP_swap DW_OP_drop */ |
15809 | |
15810 | static dw_loc_descr_ref |
15811 | bswap_loc_descriptor (rtx rtl, scalar_int_mode mode, |
15812 | machine_mode mem_mode) |
15813 | { |
15814 | dw_loc_descr_ref op0, ret, tmp; |
15815 | dw_loc_descr_ref l1jump, l1label; |
15816 | dw_loc_descr_ref l2jump, l2label; |
15817 | |
15818 | if (BITS_PER_UNIT != 8 |
15819 | || (GET_MODE_BITSIZE (mode) != 32 |
15820 | && GET_MODE_BITSIZE (mode) != 64)) |
15821 | return NULL; |
15822 | |
15823 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, |
15824 | VAR_INIT_STATUS_INITIALIZED); |
15825 | if (op0 == NULL) |
15826 | return NULL; |
15827 | |
15828 | ret = op0; |
15829 | tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8), |
15830 | mode, mem_mode, |
15831 | VAR_INIT_STATUS_INITIALIZED); |
15832 | if (tmp == NULL) |
15833 | return NULL; |
15834 | add_loc_descr (list_head: &ret, descr: tmp); |
15835 | tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode, |
15836 | VAR_INIT_STATUS_INITIALIZED); |
15837 | if (tmp == NULL) |
15838 | return NULL; |
15839 | add_loc_descr (list_head: &ret, descr: tmp); |
15840 | l1label = new_loc_descr (op: DW_OP_pick, oprnd1: 2, oprnd2: 0); |
15841 | add_loc_descr (list_head: &ret, descr: l1label); |
15842 | tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8), |
15843 | mode, mem_mode, |
15844 | VAR_INIT_STATUS_INITIALIZED); |
15845 | add_loc_descr (list_head: &ret, descr: tmp); |
15846 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_pick, oprnd1: 3, oprnd2: 0)); |
15847 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_minus, oprnd1: 0, oprnd2: 0)); |
15848 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_shr, oprnd1: 0, oprnd2: 0)); |
15849 | tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode, |
15850 | VAR_INIT_STATUS_INITIALIZED); |
15851 | if (tmp == NULL) |
15852 | return NULL; |
15853 | add_loc_descr (list_head: &ret, descr: tmp); |
15854 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0)); |
15855 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_pick, oprnd1: 2, oprnd2: 0)); |
15856 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0)); |
15857 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_or, oprnd1: 0, oprnd2: 0)); |
15858 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0)); |
15859 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_dup, oprnd1: 0, oprnd2: 0)); |
15860 | tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode, |
15861 | VAR_INIT_STATUS_INITIALIZED); |
15862 | add_loc_descr (list_head: &ret, descr: tmp); |
15863 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_eq, oprnd1: 0, oprnd2: 0)); |
15864 | l2jump = new_loc_descr (op: DW_OP_bra, oprnd1: 0, oprnd2: 0); |
15865 | add_loc_descr (list_head: &ret, descr: l2jump); |
15866 | tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode, |
15867 | VAR_INIT_STATUS_INITIALIZED); |
15868 | add_loc_descr (list_head: &ret, descr: tmp); |
15869 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_minus, oprnd1: 0, oprnd2: 0)); |
15870 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0)); |
15871 | l1jump = new_loc_descr (op: DW_OP_skip, oprnd1: 0, oprnd2: 0); |
15872 | add_loc_descr (list_head: &ret, descr: l1jump); |
15873 | l2label = new_loc_descr (op: DW_OP_drop, oprnd1: 0, oprnd2: 0); |
15874 | add_loc_descr (list_head: &ret, descr: l2label); |
15875 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0)); |
15876 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_drop, oprnd1: 0, oprnd2: 0)); |
15877 | l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc; |
15878 | l1jump->dw_loc_oprnd1.v.val_loc = l1label; |
15879 | l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc; |
15880 | l2jump->dw_loc_oprnd1.v.val_loc = l2label; |
15881 | return ret; |
15882 | } |
15883 | |
15884 | /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode): |
15885 | DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot |
15886 | [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg |
15887 | DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or |
15888 | |
15889 | ROTATERT is similar: |
15890 | DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE> |
15891 | DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot |
15892 | [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */ |
15893 | |
/* Build a DWARF expression for ROTATE or ROTATERT RTL, following the
   op-by-op templates in the comment above: x ROT n is computed as
   (x << n) | (x >> (bitsize - n)), with explicit masking when MODE is
   narrower than the DWARF address-sized stack slots.  Returns NULL if
   an operand or required mask constant cannot be described.  */

static dw_loc_descr_ref
rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
		       machine_mode mem_mode)
{
  rtx rtlop1 = XEXP (rtl, 1);
  dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
  int i;

  /* Widen a narrower shift count so both operands are evaluated in MODE.  */
  if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
    rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
  op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
			    VAR_INIT_STATUS_INITIALIZED);
  op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
			    VAR_INIT_STATUS_INITIALIZED);
  if (op0 == NULL || op1 == NULL)
    return NULL;
  /* For sub-address-size modes, prepare two "constMASK DW_OP_and"
     sequences to clear the bits shifted beyond MODE; the descriptor
     form depends on how GET_MODE_MASK fits a HOST_WIDE_INT.  */
  if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
    for (i = 0; i < 2; i++)
      {
	if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
	  mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
					mode, mem_mode,
					VAR_INIT_STATUS_INITIALIZED);
	else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
	  mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
				   ? DW_OP_const4u
				   : HOST_BITS_PER_WIDE_INT == 64
				   ? DW_OP_const8u : DW_OP_constu,
				   GET_MODE_MASK (mode), 0);
	else
	  mask[i] = NULL;
	if (mask[i] == NULL)
	  return NULL;
	add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
      }
  /* Duplicate value and count so both the shl and the shr see them.  */
  ret = op0;
  add_loc_descr (&ret, op1);
  add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
  add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
  /* ROTATERT shifts left by (bitsize - n), computed as -n + bitsize.  */
  if (GET_CODE (rtl) == ROTATERT)
    {
      add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
      add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
					  GET_MODE_BITSIZE (mode), 0));
    }
  add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
  if (mask[0] != NULL)
    add_loc_descr (&ret, mask[0]);
  add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
  /* The second mask applies one stack slot down, hence the swaps.  */
  if (mask[1] != NULL)
    {
      add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
      add_loc_descr (&ret, mask[1]);
      add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
    }
  /* ROTATE shifts right by (bitsize - n) for the wrap-around half.  */
  if (GET_CODE (rtl) == ROTATE)
    {
      add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
      add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
					  GET_MODE_BITSIZE (mode), 0));
    }
  add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
  add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
  return ret;
}
15959 | |
15960 | /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref |
15961 | for DEBUG_PARAMETER_REF RTL. */ |
15962 | |
15963 | static dw_loc_descr_ref |
15964 | parameter_ref_descriptor (rtx rtl) |
15965 | { |
15966 | dw_loc_descr_ref ret; |
15967 | dw_die_ref ref; |
15968 | |
15969 | if (dwarf_strict) |
15970 | return NULL; |
15971 | gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL); |
15972 | /* With LTO during LTRANS we get the late DIE that refers to the early |
15973 | DIE, thus we add another indirection here. This seems to confuse |
15974 | gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */ |
15975 | ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl)); |
15976 | ret = new_loc_descr (op: DW_OP_GNU_parameter_ref, oprnd1: 0, oprnd2: 0); |
15977 | if (ref) |
15978 | { |
15979 | ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
15980 | ret->dw_loc_oprnd1.v.val_die_ref.die = ref; |
15981 | ret->dw_loc_oprnd1.v.val_die_ref.external = 0; |
15982 | } |
15983 | else |
15984 | { |
15985 | ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref; |
15986 | ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl); |
15987 | } |
15988 | return ret; |
15989 | } |
15990 | |
15991 | /* The following routine converts the RTL for a variable or parameter |
15992 | (resident in memory) into an equivalent Dwarf representation of a |
15993 | mechanism for getting the address of that same variable onto the top of a |
15994 | hypothetical "address evaluation" stack. |
15995 | |
15996 | When creating memory location descriptors, we are effectively transforming |
15997 | the RTL for a memory-resident object into its Dwarf postfix expression |
15998 | equivalent. This routine recursively descends an RTL tree, turning |
15999 | it into Dwarf postfix code as it goes. |
16000 | |
16001 | MODE is the mode that should be assumed for the rtl if it is VOIDmode. |
16002 | |
16003 | MEM_MODE is the mode of the memory reference, needed to handle some |
16004 | autoincrement addressing modes. |
16005 | |
16006 | Return 0 if we can't represent the location. */ |
16007 | |
16008 | dw_loc_descr_ref |
16009 | mem_loc_descriptor (rtx rtl, machine_mode mode, |
16010 | machine_mode mem_mode, |
16011 | enum var_init_status initialized) |
16012 | { |
16013 | dw_loc_descr_ref mem_loc_result = NULL; |
16014 | enum dwarf_location_atom op; |
16015 | dw_loc_descr_ref op0, op1; |
16016 | rtx inner = NULL_RTX; |
16017 | |
16018 | if (mode == VOIDmode) |
16019 | mode = GET_MODE (rtl); |
16020 | |
16021 | /* Note that for a dynamically sized array, the location we will generate a |
16022 | description of here will be the lowest numbered location which is |
16023 | actually within the array. That's *not* necessarily the same as the |
16024 | zeroth element of the array. */ |
16025 | |
16026 | rtl = targetm.delegitimize_address (rtl); |
16027 | |
16028 | if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode) |
16029 | return NULL; |
16030 | |
16031 | scalar_int_mode int_mode = BImode, inner_mode, op1_mode; |
16032 | switch (GET_CODE (rtl)) |
16033 | { |
16034 | case POST_INC: |
16035 | case POST_DEC: |
16036 | case POST_MODIFY: |
16037 | return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized); |
16038 | |
16039 | case SUBREG: |
16040 | /* The case of a subreg may arise when we have a local (register) |
16041 | variable or a formal (register) parameter which doesn't quite fill |
16042 | up an entire register. For now, just assume that it is |
16043 | legitimate to make the Dwarf info refer to the whole register which |
16044 | contains the given subreg. */ |
16045 | if (!subreg_lowpart_p (rtl)) |
16046 | break; |
16047 | inner = SUBREG_REG (rtl); |
16048 | /* FALLTHRU */ |
16049 | case TRUNCATE: |
16050 | if (inner == NULL_RTX) |
16051 | inner = XEXP (rtl, 0); |
16052 | if (is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16053 | && is_a <scalar_int_mode> (GET_MODE (inner), result: &inner_mode) |
16054 | && (GET_MODE_SIZE (mode: int_mode) <= DWARF2_ADDR_SIZE |
16055 | #ifdef POINTERS_EXTEND_UNSIGNED |
16056 | || (int_mode == Pmode && mem_mode != VOIDmode) |
16057 | #endif |
16058 | ) |
16059 | && GET_MODE_SIZE (mode: inner_mode) <= DWARF2_ADDR_SIZE) |
16060 | { |
16061 | mem_loc_result = mem_loc_descriptor (rtl: inner, |
16062 | mode: inner_mode, |
16063 | mem_mode, initialized); |
16064 | break; |
16065 | } |
16066 | if (dwarf_strict && dwarf_version < 5) |
16067 | break; |
16068 | if (is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16069 | && is_a <scalar_int_mode> (GET_MODE (inner), result: &inner_mode) |
16070 | ? GET_MODE_SIZE (mode: int_mode) <= GET_MODE_SIZE (mode: inner_mode) |
16071 | : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner)))) |
16072 | { |
16073 | dw_die_ref type_die; |
16074 | dw_loc_descr_ref cvt; |
16075 | |
16076 | mem_loc_result = mem_loc_descriptor (rtl: inner, |
16077 | GET_MODE (inner), |
16078 | mem_mode, initialized); |
16079 | if (mem_loc_result == NULL) |
16080 | break; |
16081 | type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode)); |
16082 | if (type_die == NULL) |
16083 | { |
16084 | mem_loc_result = NULL; |
16085 | break; |
16086 | } |
16087 | if (maybe_ne (a: GET_MODE_SIZE (mode), b: GET_MODE_SIZE (GET_MODE (inner)))) |
16088 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
16089 | else |
16090 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_reinterpret), oprnd1: 0, oprnd2: 0); |
16091 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
16092 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
16093 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
16094 | add_loc_descr (list_head: &mem_loc_result, descr: cvt); |
16095 | if (is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16096 | && GET_MODE_SIZE (mode: int_mode) <= DWARF2_ADDR_SIZE) |
16097 | { |
16098 | /* Convert it to untyped afterwards. */ |
16099 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
16100 | add_loc_descr (list_head: &mem_loc_result, descr: cvt); |
16101 | } |
16102 | } |
16103 | break; |
16104 | |
16105 | case REG: |
16106 | if (!is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16107 | || (GET_MODE_SIZE (mode: int_mode) > DWARF2_ADDR_SIZE |
16108 | && rtl != arg_pointer_rtx |
16109 | && rtl != frame_pointer_rtx |
16110 | #ifdef POINTERS_EXTEND_UNSIGNED |
16111 | && (int_mode != Pmode || mem_mode == VOIDmode) |
16112 | #endif |
16113 | )) |
16114 | { |
16115 | dw_die_ref type_die; |
16116 | unsigned int debugger_regnum; |
16117 | |
16118 | if (dwarf_strict && dwarf_version < 5) |
16119 | break; |
16120 | if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER) |
16121 | break; |
16122 | type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode)); |
16123 | if (type_die == NULL) |
16124 | break; |
16125 | |
16126 | debugger_regnum = debugger_reg_number (rtl); |
16127 | if (debugger_regnum == IGNORED_DWARF_REGNUM) |
16128 | break; |
16129 | mem_loc_result = new_loc_descr (op: dwarf_OP (op: DW_OP_regval_type), |
16130 | oprnd1: debugger_regnum, oprnd2: 0); |
16131 | mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref; |
16132 | mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die; |
16133 | mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0; |
16134 | break; |
16135 | } |
16136 | /* Whenever a register number forms a part of the description of the |
16137 | method for calculating the (dynamic) address of a memory resident |
16138 | object, DWARF rules require the register number be referred to as |
16139 | a "base register". This distinction is not based in any way upon |
16140 | what category of register the hardware believes the given register |
16141 | belongs to. This is strictly DWARF terminology we're dealing with |
16142 | here. Note that in cases where the location of a memory-resident |
16143 | data object could be expressed as: OP_ADD (OP_BASEREG (basereg), |
16144 | OP_CONST (0)) the actual DWARF location descriptor that we generate |
16145 | may just be OP_BASEREG (basereg). This may look deceptively like |
16146 | the object in question was allocated to a register (rather than in |
16147 | memory) so DWARF consumers need to be aware of the subtle |
16148 | distinction between OP_REG and OP_BASEREG. */ |
16149 | if (REGNO (rtl) < FIRST_PSEUDO_REGISTER) |
16150 | mem_loc_result = based_loc_descr (reg: rtl, offset: 0, initialized: VAR_INIT_STATUS_INITIALIZED); |
16151 | else if (stack_realign_drap |
16152 | && crtl->drap_reg |
16153 | && crtl->args.internal_arg_pointer == rtl |
16154 | && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER) |
16155 | { |
16156 | /* If RTL is internal_arg_pointer, which has been optimized |
16157 | out, use DRAP instead. */ |
16158 | mem_loc_result = based_loc_descr (crtl->drap_reg, offset: 0, |
16159 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16160 | } |
16161 | break; |
16162 | |
16163 | case SIGN_EXTEND: |
16164 | case ZERO_EXTEND: |
16165 | if (!is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16166 | || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), result: &inner_mode)) |
16167 | break; |
16168 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode: inner_mode, |
16169 | mem_mode, initialized: VAR_INIT_STATUS_INITIALIZED); |
16170 | if (op0 == 0) |
16171 | break; |
16172 | else if (GET_CODE (rtl) == ZERO_EXTEND |
16173 | && GET_MODE_SIZE (mode: int_mode) <= DWARF2_ADDR_SIZE |
16174 | && GET_MODE_BITSIZE (mode: inner_mode) < HOST_BITS_PER_WIDE_INT |
16175 | /* If DW_OP_const{1,2,4}u won't be used, it is shorter |
16176 | to expand zero extend as two shifts instead of |
16177 | masking. */ |
16178 | && GET_MODE_SIZE (mode: inner_mode) <= 4) |
16179 | { |
16180 | mem_loc_result = op0; |
16181 | add_loc_descr (list_head: &mem_loc_result, |
16182 | descr: int_loc_descriptor (GET_MODE_MASK (inner_mode))); |
16183 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_and, oprnd1: 0, oprnd2: 0)); |
16184 | } |
16185 | else if (GET_MODE_SIZE (mode: int_mode) <= DWARF2_ADDR_SIZE) |
16186 | { |
16187 | int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (mode: inner_mode); |
16188 | shift *= BITS_PER_UNIT; |
16189 | if (GET_CODE (rtl) == SIGN_EXTEND) |
16190 | op = DW_OP_shra; |
16191 | else |
16192 | op = DW_OP_shr; |
16193 | mem_loc_result = op0; |
16194 | add_loc_descr (list_head: &mem_loc_result, descr: int_loc_descriptor (poly_i: shift)); |
16195 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0)); |
16196 | add_loc_descr (list_head: &mem_loc_result, descr: int_loc_descriptor (poly_i: shift)); |
16197 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
16198 | } |
16199 | else if (!dwarf_strict || dwarf_version >= 5) |
16200 | { |
16201 | dw_die_ref type_die1, type_die2; |
16202 | dw_loc_descr_ref cvt; |
16203 | |
16204 | type_die1 = base_type_for_mode (mode: inner_mode, |
16205 | GET_CODE (rtl) == ZERO_EXTEND); |
16206 | if (type_die1 == NULL) |
16207 | break; |
16208 | type_die2 = base_type_for_mode (mode: int_mode, unsignedp: 1); |
16209 | if (type_die2 == NULL) |
16210 | break; |
16211 | mem_loc_result = op0; |
16212 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
16213 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
16214 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1; |
16215 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
16216 | add_loc_descr (list_head: &mem_loc_result, descr: cvt); |
16217 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
16218 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
16219 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2; |
16220 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
16221 | add_loc_descr (list_head: &mem_loc_result, descr: cvt); |
16222 | } |
16223 | break; |
16224 | |
16225 | case MEM: |
16226 | { |
16227 | rtx new_rtl = avoid_constant_pool_reference (rtl); |
16228 | if (new_rtl != rtl) |
16229 | { |
16230 | mem_loc_result = mem_loc_descriptor (rtl: new_rtl, mode, mem_mode, |
16231 | initialized); |
16232 | if (mem_loc_result != NULL) |
16233 | return mem_loc_result; |
16234 | } |
16235 | } |
16236 | mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), |
16237 | mode: get_address_mode (mem: rtl), mem_mode: mode, |
16238 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16239 | if (mem_loc_result == NULL) |
16240 | mem_loc_result = tls_mem_loc_descriptor (mem: rtl); |
16241 | if (mem_loc_result != NULL) |
16242 | { |
16243 | if (!is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16244 | || GET_MODE_SIZE (mode: int_mode) > DWARF2_ADDR_SIZE) |
16245 | { |
16246 | dw_die_ref type_die; |
16247 | dw_loc_descr_ref deref; |
16248 | HOST_WIDE_INT size; |
16249 | |
16250 | if (dwarf_strict && dwarf_version < 5) |
16251 | return NULL; |
16252 | if (!GET_MODE_SIZE (mode).is_constant (const_value: &size)) |
16253 | return NULL; |
16254 | type_die |
16255 | = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode)); |
16256 | if (type_die == NULL) |
16257 | return NULL; |
16258 | deref = new_loc_descr (op: dwarf_OP (op: DW_OP_deref_type), oprnd1: size, oprnd2: 0); |
16259 | deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref; |
16260 | deref->dw_loc_oprnd2.v.val_die_ref.die = type_die; |
16261 | deref->dw_loc_oprnd2.v.val_die_ref.external = 0; |
16262 | add_loc_descr (list_head: &mem_loc_result, descr: deref); |
16263 | } |
16264 | else if (GET_MODE_SIZE (mode: int_mode) == DWARF2_ADDR_SIZE) |
16265 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_deref, oprnd1: 0, oprnd2: 0)); |
16266 | else |
16267 | add_loc_descr (list_head: &mem_loc_result, |
16268 | descr: new_loc_descr (op: DW_OP_deref_size, |
16269 | oprnd1: GET_MODE_SIZE (mode: int_mode), oprnd2: 0)); |
16270 | } |
16271 | break; |
16272 | |
16273 | case LO_SUM: |
16274 | return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized); |
16275 | |
16276 | case LABEL_REF: |
16277 | /* Some ports can transform a symbol ref into a label ref, because |
16278 | the symbol ref is too far away and has to be dumped into a constant |
16279 | pool. */ |
16280 | case CONST: |
16281 | case SYMBOL_REF: |
16282 | case UNSPEC: |
16283 | if (!is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16284 | || (GET_MODE_SIZE (mode: int_mode) > DWARF2_ADDR_SIZE |
16285 | #ifdef POINTERS_EXTEND_UNSIGNED |
16286 | && (int_mode != Pmode || mem_mode == VOIDmode) |
16287 | #endif |
16288 | )) |
16289 | break; |
16290 | |
16291 | if (GET_CODE (rtl) == UNSPEC) |
16292 | { |
16293 | /* If delegitimize_address couldn't do anything with the UNSPEC, we |
16294 | can't express it in the debug info. This can happen e.g. with some |
16295 | TLS UNSPECs. Allow UNSPECs formerly from CONST that the backend |
16296 | approves. */ |
16297 | bool not_ok = false; |
16298 | subrtx_var_iterator::array_type array; |
16299 | FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL) |
16300 | if (*iter != rtl && !CONSTANT_P (*iter)) |
16301 | { |
16302 | not_ok = true; |
16303 | break; |
16304 | } |
16305 | |
16306 | if (not_ok) |
16307 | break; |
16308 | |
16309 | FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL) |
16310 | if (!const_ok_for_output_1 (rtl: *iter)) |
16311 | { |
16312 | not_ok = true; |
16313 | break; |
16314 | } |
16315 | |
16316 | if (not_ok) |
16317 | break; |
16318 | |
16319 | rtl = gen_rtx_CONST (GET_MODE (rtl), rtl); |
16320 | goto symref; |
16321 | } |
16322 | |
16323 | if (GET_CODE (rtl) == SYMBOL_REF |
16324 | && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE) |
16325 | { |
16326 | dw_loc_descr_ref temp; |
16327 | |
16328 | /* If this is not defined, we have no way to emit the data. */ |
16329 | if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel) |
16330 | break; |
16331 | |
16332 | temp = new_addr_loc_descr (addr: rtl, dtprel: dtprel_true); |
16333 | |
16334 | /* We check for DWARF 5 here because gdb did not implement |
16335 | DW_OP_form_tls_address until after 7.12. */ |
16336 | mem_loc_result = new_loc_descr (op: (dwarf_version >= 5 |
16337 | ? DW_OP_form_tls_address |
16338 | : DW_OP_GNU_push_tls_address), |
16339 | oprnd1: 0, oprnd2: 0); |
16340 | add_loc_descr (list_head: &mem_loc_result, descr: temp); |
16341 | |
16342 | break; |
16343 | } |
16344 | |
16345 | if (!const_ok_for_output (rtl)) |
16346 | { |
16347 | if (GET_CODE (rtl) == CONST) |
16348 | switch (GET_CODE (XEXP (rtl, 0))) |
16349 | { |
16350 | case NOT: |
16351 | op = DW_OP_not; |
16352 | goto try_const_unop; |
16353 | case NEG: |
16354 | op = DW_OP_neg; |
16355 | goto try_const_unop; |
16356 | try_const_unop: |
16357 | rtx arg; |
16358 | arg = XEXP (XEXP (rtl, 0), 0); |
16359 | if (!CONSTANT_P (arg)) |
16360 | arg = gen_rtx_CONST (int_mode, arg); |
16361 | op0 = mem_loc_descriptor (rtl: arg, mode: int_mode, mem_mode, |
16362 | initialized); |
16363 | if (op0) |
16364 | { |
16365 | mem_loc_result = op0; |
16366 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
16367 | } |
16368 | break; |
16369 | default: |
16370 | mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode: int_mode, |
16371 | mem_mode, initialized); |
16372 | break; |
16373 | } |
16374 | break; |
16375 | } |
16376 | |
16377 | symref: |
16378 | mem_loc_result = new_addr_loc_descr (addr: rtl, dtprel: dtprel_false); |
16379 | vec_safe_push (v&: used_rtx_array, obj: rtl); |
16380 | break; |
16381 | |
16382 | case CONCAT: |
16383 | case CONCATN: |
16384 | case VAR_LOCATION: |
16385 | case DEBUG_IMPLICIT_PTR: |
16386 | expansion_failed (NULL_TREE, rtl, |
16387 | reason: "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor" ); |
16388 | return 0; |
16389 | |
16390 | case ENTRY_VALUE: |
16391 | if (dwarf_strict && dwarf_version < 5) |
16392 | return NULL; |
16393 | if (REG_P (ENTRY_VALUE_EXP (rtl))) |
16394 | { |
16395 | if (!is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16396 | || GET_MODE_SIZE (mode: int_mode) > DWARF2_ADDR_SIZE) |
16397 | op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode, |
16398 | VOIDmode, initialized: VAR_INIT_STATUS_INITIALIZED); |
16399 | else |
16400 | { |
16401 | unsigned int debugger_regnum = debugger_reg_number (ENTRY_VALUE_EXP (rtl)); |
16402 | if (debugger_regnum == IGNORED_DWARF_REGNUM) |
16403 | return NULL; |
16404 | op0 = one_reg_loc_descriptor (regno: debugger_regnum, |
16405 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16406 | } |
16407 | } |
16408 | else if (MEM_P (ENTRY_VALUE_EXP (rtl)) |
16409 | && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0))) |
16410 | { |
16411 | op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode, |
16412 | VOIDmode, initialized: VAR_INIT_STATUS_INITIALIZED); |
16413 | if (op0 && op0->dw_loc_opc == DW_OP_fbreg) |
16414 | return NULL; |
16415 | } |
16416 | else |
16417 | gcc_unreachable (); |
16418 | if (op0 == NULL) |
16419 | return NULL; |
16420 | mem_loc_result = new_loc_descr (op: dwarf_OP (op: DW_OP_entry_value), oprnd1: 0, oprnd2: 0); |
16421 | mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc; |
16422 | mem_loc_result->dw_loc_oprnd1.v.val_loc = op0; |
16423 | break; |
16424 | |
16425 | case DEBUG_PARAMETER_REF: |
16426 | mem_loc_result = parameter_ref_descriptor (rtl); |
16427 | break; |
16428 | |
16429 | case PRE_MODIFY: |
16430 | /* Extract the PLUS expression nested inside and fall into |
16431 | PLUS code below. */ |
16432 | rtl = XEXP (rtl, 1); |
16433 | goto plus; |
16434 | |
16435 | case PRE_INC: |
16436 | case PRE_DEC: |
16437 | /* Turn these into a PLUS expression and fall into the PLUS code |
16438 | below. */ |
16439 | rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0), |
16440 | gen_int_mode (GET_CODE (rtl) == PRE_INC |
16441 | ? GET_MODE_UNIT_SIZE (mem_mode) |
16442 | : -GET_MODE_UNIT_SIZE (mem_mode), |
16443 | mode)); |
16444 | |
16445 | /* fall through */ |
16446 | |
16447 | case PLUS: |
16448 | plus: |
16449 | if (is_based_loc (rtl) |
16450 | && is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16451 | && (GET_MODE_SIZE (mode: int_mode) <= DWARF2_ADDR_SIZE |
16452 | || XEXP (rtl, 0) == arg_pointer_rtx |
16453 | || XEXP (rtl, 0) == frame_pointer_rtx)) |
16454 | mem_loc_result = based_loc_descr (XEXP (rtl, 0), |
16455 | INTVAL (XEXP (rtl, 1)), |
16456 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16457 | else |
16458 | { |
16459 | mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, |
16460 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16461 | if (mem_loc_result == 0) |
16462 | break; |
16463 | |
16464 | if (CONST_INT_P (XEXP (rtl, 1)) |
16465 | && (GET_MODE_SIZE (mode: as_a <scalar_int_mode> (m: mode)) |
16466 | <= DWARF2_ADDR_SIZE)) |
16467 | loc_descr_plus_const (list_head: &mem_loc_result, INTVAL (XEXP (rtl, 1))); |
16468 | else |
16469 | { |
16470 | op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, |
16471 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16472 | if (op1 == 0) |
16473 | return NULL; |
16474 | add_loc_descr (list_head: &mem_loc_result, descr: op1); |
16475 | add_loc_descr (list_head: &mem_loc_result, |
16476 | descr: new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0)); |
16477 | } |
16478 | } |
16479 | break; |
16480 | |
16481 | /* If a pseudo-reg is optimized away, it is possible for it to |
16482 | be replaced with a MEM containing a multiply or shift. */ |
16483 | case MINUS: |
16484 | op = DW_OP_minus; |
16485 | goto do_binop; |
16486 | |
16487 | case MULT: |
16488 | op = DW_OP_mul; |
16489 | goto do_binop; |
16490 | |
16491 | case DIV: |
16492 | if ((!dwarf_strict || dwarf_version >= 5) |
16493 | && is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16494 | && GET_MODE_SIZE (mode: int_mode) > DWARF2_ADDR_SIZE) |
16495 | { |
16496 | mem_loc_result = typed_binop (op: DW_OP_div, rtl, |
16497 | type_die: base_type_for_mode (mode, unsignedp: 0), |
16498 | mode: int_mode, mem_mode); |
16499 | break; |
16500 | } |
16501 | op = DW_OP_div; |
16502 | goto do_binop; |
16503 | |
16504 | case UMOD: |
16505 | op = DW_OP_mod; |
16506 | goto do_binop; |
16507 | |
16508 | case ASHIFT: |
16509 | op = DW_OP_shl; |
16510 | goto do_shift; |
16511 | |
16512 | case ASHIFTRT: |
16513 | op = DW_OP_shra; |
16514 | goto do_shift; |
16515 | |
16516 | case LSHIFTRT: |
16517 | op = DW_OP_shr; |
16518 | goto do_shift; |
16519 | |
16520 | do_shift: |
16521 | if (!is_a <scalar_int_mode> (m: mode, result: &int_mode)) |
16522 | break; |
16523 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode: int_mode, mem_mode, |
16524 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16525 | { |
16526 | rtx rtlop1 = XEXP (rtl, 1); |
16527 | if (is_a <scalar_int_mode> (GET_MODE (rtlop1), result: &op1_mode) |
16528 | && GET_MODE_BITSIZE (mode: op1_mode) < GET_MODE_BITSIZE (mode: int_mode)) |
16529 | rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1); |
16530 | op1 = mem_loc_descriptor (rtl: rtlop1, mode: int_mode, mem_mode, |
16531 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16532 | } |
16533 | |
16534 | if (op0 == 0 || op1 == 0) |
16535 | break; |
16536 | |
16537 | mem_loc_result = op0; |
16538 | add_loc_descr (list_head: &mem_loc_result, descr: op1); |
16539 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
16540 | break; |
16541 | |
16542 | case AND: |
16543 | op = DW_OP_and; |
16544 | goto do_binop; |
16545 | |
16546 | case IOR: |
16547 | op = DW_OP_or; |
16548 | goto do_binop; |
16549 | |
16550 | case XOR: |
16551 | op = DW_OP_xor; |
16552 | goto do_binop; |
16553 | |
16554 | do_binop: |
16555 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, |
16556 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16557 | if (XEXP (rtl, 0) == XEXP (rtl, 1)) |
16558 | { |
16559 | if (op0 == 0) |
16560 | break; |
16561 | mem_loc_result = op0; |
16562 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_dup, oprnd1: 0, oprnd2: 0)); |
16563 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
16564 | break; |
16565 | } |
16566 | op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, |
16567 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16568 | |
16569 | if (op0 == 0 || op1 == 0) |
16570 | break; |
16571 | |
16572 | mem_loc_result = op0; |
16573 | add_loc_descr (list_head: &mem_loc_result, descr: op1); |
16574 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
16575 | break; |
16576 | |
16577 | case MOD: |
16578 | if ((!dwarf_strict || dwarf_version >= 5) |
16579 | && is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16580 | && GET_MODE_SIZE (mode: int_mode) > DWARF2_ADDR_SIZE) |
16581 | { |
16582 | mem_loc_result = typed_binop (op: DW_OP_mod, rtl, |
16583 | type_die: base_type_for_mode (mode, unsignedp: 0), |
16584 | mode: int_mode, mem_mode); |
16585 | break; |
16586 | } |
16587 | |
16588 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, |
16589 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16590 | op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, |
16591 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16592 | |
16593 | if (op0 == 0 || op1 == 0) |
16594 | break; |
16595 | |
16596 | mem_loc_result = op0; |
16597 | add_loc_descr (list_head: &mem_loc_result, descr: op1); |
16598 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_over, oprnd1: 0, oprnd2: 0)); |
16599 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_over, oprnd1: 0, oprnd2: 0)); |
16600 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_div, oprnd1: 0, oprnd2: 0)); |
16601 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_mul, oprnd1: 0, oprnd2: 0)); |
16602 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_minus, oprnd1: 0, oprnd2: 0)); |
16603 | break; |
16604 | |
16605 | case UDIV: |
16606 | if ((!dwarf_strict || dwarf_version >= 5) |
16607 | && is_a <scalar_int_mode> (m: mode, result: &int_mode)) |
16608 | { |
16609 | /* We can use a signed divide if the sign bit is not set. */ |
16610 | if (GET_MODE_SIZE (mode: int_mode) < DWARF2_ADDR_SIZE) |
16611 | { |
16612 | op = DW_OP_div; |
16613 | goto do_binop; |
16614 | } |
16615 | |
16616 | mem_loc_result = typed_binop (op: DW_OP_div, rtl, |
16617 | type_die: base_type_for_mode (mode: int_mode, unsignedp: 1), |
16618 | mode: int_mode, mem_mode); |
16619 | } |
16620 | break; |
16621 | |
16622 | case NOT: |
16623 | op = DW_OP_not; |
16624 | goto do_unop; |
16625 | |
16626 | case ABS: |
16627 | op = DW_OP_abs; |
16628 | goto do_unop; |
16629 | |
16630 | case NEG: |
16631 | op = DW_OP_neg; |
16632 | goto do_unop; |
16633 | |
16634 | do_unop: |
16635 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, |
16636 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16637 | |
16638 | if (op0 == 0) |
16639 | break; |
16640 | |
16641 | mem_loc_result = op0; |
16642 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
16643 | break; |
16644 | |
16645 | case CONST_INT: |
16646 | if (!is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16647 | || GET_MODE_SIZE (mode: int_mode) <= DWARF2_ADDR_SIZE |
16648 | #ifdef POINTERS_EXTEND_UNSIGNED |
16649 | || (int_mode == Pmode |
16650 | && mem_mode != VOIDmode |
16651 | && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl)) |
16652 | #endif |
16653 | ) |
16654 | { |
16655 | mem_loc_result = int_loc_descriptor (INTVAL (rtl)); |
16656 | break; |
16657 | } |
16658 | if ((!dwarf_strict || dwarf_version >= 5) |
16659 | && (GET_MODE_BITSIZE (mode: int_mode) == HOST_BITS_PER_WIDE_INT |
16660 | || GET_MODE_BITSIZE (mode: int_mode) == HOST_BITS_PER_DOUBLE_INT)) |
16661 | { |
16662 | dw_die_ref type_die = base_type_for_mode (mode: int_mode, unsignedp: 1); |
16663 | scalar_int_mode amode; |
16664 | if (type_die == NULL) |
16665 | return NULL; |
16666 | if (INTVAL (rtl) >= 0 |
16667 | && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, limit: 0) |
16668 | .exists (mode: &amode)) |
16669 | && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl) |
16670 | /* const DW_OP_convert <XXX> vs. |
16671 | DW_OP_const_type <XXX, 1, const>. */ |
16672 | && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1 |
16673 | < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (mode: int_mode)) |
16674 | { |
16675 | mem_loc_result = int_loc_descriptor (INTVAL (rtl)); |
16676 | op0 = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
16677 | op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
16678 | op0->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
16679 | op0->dw_loc_oprnd1.v.val_die_ref.external = 0; |
16680 | add_loc_descr (list_head: &mem_loc_result, descr: op0); |
16681 | return mem_loc_result; |
16682 | } |
16683 | mem_loc_result = new_loc_descr (op: dwarf_OP (op: DW_OP_const_type), oprnd1: 0, |
16684 | INTVAL (rtl)); |
16685 | mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
16686 | mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
16687 | mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0; |
16688 | if (GET_MODE_BITSIZE (mode: int_mode) == HOST_BITS_PER_WIDE_INT) |
16689 | mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const; |
16690 | else |
16691 | { |
16692 | mem_loc_result->dw_loc_oprnd2.val_class |
16693 | = dw_val_class_const_double; |
16694 | mem_loc_result->dw_loc_oprnd2.v.val_double |
16695 | = double_int::from_shwi (INTVAL (rtl)); |
16696 | } |
16697 | } |
16698 | break; |
16699 | |
16700 | case CONST_DOUBLE: |
16701 | if (!dwarf_strict || dwarf_version >= 5) |
16702 | { |
16703 | dw_die_ref type_die; |
16704 | |
16705 | /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a |
16706 | CONST_DOUBLE rtx could represent either a large integer |
16707 | or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0, |
16708 | the value is always a floating point constant. |
16709 | |
16710 | When it is an integer, a CONST_DOUBLE is used whenever |
16711 | the constant requires 2 HWIs to be adequately represented. |
16712 | We output CONST_DOUBLEs as blocks. */ |
16713 | if (mode == VOIDmode |
16714 | || (GET_MODE (rtl) == VOIDmode |
16715 | && maybe_ne (a: GET_MODE_BITSIZE (mode), |
16716 | HOST_BITS_PER_DOUBLE_INT))) |
16717 | break; |
16718 | type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode)); |
16719 | if (type_die == NULL) |
16720 | return NULL; |
16721 | mem_loc_result = new_loc_descr (op: dwarf_OP (op: DW_OP_const_type), oprnd1: 0, oprnd2: 0); |
16722 | mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
16723 | mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
16724 | mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0; |
16725 | #if TARGET_SUPPORTS_WIDE_INT == 0 |
16726 | if (!SCALAR_FLOAT_MODE_P (mode)) |
16727 | { |
16728 | mem_loc_result->dw_loc_oprnd2.val_class |
16729 | = dw_val_class_const_double; |
16730 | mem_loc_result->dw_loc_oprnd2.v.val_double |
16731 | = rtx_to_double_int (rtl); |
16732 | } |
16733 | else |
16734 | #endif |
16735 | { |
16736 | scalar_float_mode float_mode = as_a <scalar_float_mode> (m: mode); |
16737 | unsigned int length = GET_MODE_SIZE (mode: float_mode); |
16738 | unsigned char *array = ggc_vec_alloc<unsigned char> (c: length); |
16739 | unsigned int elt_size = insert_float (rtl, array); |
16740 | |
16741 | mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec; |
16742 | mem_loc_result->dw_loc_oprnd2.v.val_vec.length |
16743 | = length / elt_size; |
16744 | mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size; |
16745 | mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array; |
16746 | } |
16747 | } |
16748 | break; |
16749 | |
16750 | case CONST_WIDE_INT: |
16751 | if (!dwarf_strict || dwarf_version >= 5) |
16752 | { |
16753 | dw_die_ref type_die; |
16754 | |
16755 | type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode)); |
16756 | if (type_die == NULL) |
16757 | return NULL; |
16758 | mem_loc_result = new_loc_descr (op: dwarf_OP (op: DW_OP_const_type), oprnd1: 0, oprnd2: 0); |
16759 | mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
16760 | mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
16761 | mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0; |
16762 | mem_loc_result->dw_loc_oprnd2.val_class |
16763 | = dw_val_class_wide_int; |
16764 | mem_loc_result->dw_loc_oprnd2.v.val_wide |
16765 | = alloc_dw_wide_int (w: rtx_mode_t (rtl, mode)); |
16766 | } |
16767 | break; |
16768 | |
16769 | case CONST_POLY_INT: |
16770 | mem_loc_result = int_loc_descriptor (poly_i: rtx_to_poly_int64 (x: rtl)); |
16771 | break; |
16772 | |
16773 | case EQ: |
16774 | mem_loc_result = scompare_loc_descriptor (op: DW_OP_eq, rtl, mem_mode); |
16775 | break; |
16776 | |
16777 | case GE: |
16778 | mem_loc_result = scompare_loc_descriptor (op: DW_OP_ge, rtl, mem_mode); |
16779 | break; |
16780 | |
16781 | case GT: |
16782 | mem_loc_result = scompare_loc_descriptor (op: DW_OP_gt, rtl, mem_mode); |
16783 | break; |
16784 | |
16785 | case LE: |
16786 | mem_loc_result = scompare_loc_descriptor (op: DW_OP_le, rtl, mem_mode); |
16787 | break; |
16788 | |
16789 | case LT: |
16790 | mem_loc_result = scompare_loc_descriptor (op: DW_OP_lt, rtl, mem_mode); |
16791 | break; |
16792 | |
16793 | case NE: |
16794 | mem_loc_result = scompare_loc_descriptor (op: DW_OP_ne, rtl, mem_mode); |
16795 | break; |
16796 | |
16797 | case GEU: |
16798 | mem_loc_result = ucompare_loc_descriptor (op: DW_OP_ge, rtl, mem_mode); |
16799 | break; |
16800 | |
16801 | case GTU: |
16802 | mem_loc_result = ucompare_loc_descriptor (op: DW_OP_gt, rtl, mem_mode); |
16803 | break; |
16804 | |
16805 | case LEU: |
16806 | mem_loc_result = ucompare_loc_descriptor (op: DW_OP_le, rtl, mem_mode); |
16807 | break; |
16808 | |
16809 | case LTU: |
16810 | mem_loc_result = ucompare_loc_descriptor (op: DW_OP_lt, rtl, mem_mode); |
16811 | break; |
16812 | |
16813 | case UMIN: |
16814 | case UMAX: |
16815 | if (!SCALAR_INT_MODE_P (mode)) |
16816 | break; |
16817 | /* FALLTHRU */ |
16818 | case SMIN: |
16819 | case SMAX: |
16820 | mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode); |
16821 | break; |
16822 | |
16823 | case ZERO_EXTRACT: |
16824 | case SIGN_EXTRACT: |
16825 | if (CONST_INT_P (XEXP (rtl, 1)) |
16826 | && CONST_INT_P (XEXP (rtl, 2)) |
16827 | && is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16828 | && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), result: &inner_mode) |
16829 | && GET_MODE_SIZE (mode: int_mode) <= DWARF2_ADDR_SIZE |
16830 | && GET_MODE_SIZE (mode: inner_mode) <= DWARF2_ADDR_SIZE |
16831 | && ((unsigned) INTVAL (XEXP (rtl, 1)) |
16832 | + (unsigned) INTVAL (XEXP (rtl, 2)) |
16833 | <= GET_MODE_BITSIZE (mode: int_mode))) |
16834 | { |
16835 | int shift, size; |
16836 | op0 = mem_loc_descriptor (XEXP (rtl, 0), mode: inner_mode, |
16837 | mem_mode, initialized: VAR_INIT_STATUS_INITIALIZED); |
16838 | if (op0 == 0) |
16839 | break; |
16840 | if (GET_CODE (rtl) == SIGN_EXTRACT) |
16841 | op = DW_OP_shra; |
16842 | else |
16843 | op = DW_OP_shr; |
16844 | mem_loc_result = op0; |
16845 | size = INTVAL (XEXP (rtl, 1)); |
16846 | shift = INTVAL (XEXP (rtl, 2)); |
16847 | if (BITS_BIG_ENDIAN) |
16848 | shift = GET_MODE_BITSIZE (mode: inner_mode) - shift - size; |
16849 | if (shift + size != (int) DWARF2_ADDR_SIZE) |
16850 | { |
16851 | add_loc_descr (list_head: &mem_loc_result, |
16852 | descr: int_loc_descriptor (DWARF2_ADDR_SIZE |
16853 | - shift - size)); |
16854 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_shl, oprnd1: 0, oprnd2: 0)); |
16855 | } |
16856 | if (size != (int) DWARF2_ADDR_SIZE) |
16857 | { |
16858 | add_loc_descr (list_head: &mem_loc_result, |
16859 | descr: int_loc_descriptor (DWARF2_ADDR_SIZE - size)); |
16860 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
16861 | } |
16862 | } |
16863 | break; |
16864 | |
16865 | case IF_THEN_ELSE: |
16866 | { |
16867 | dw_loc_descr_ref op2, bra_node, drop_node; |
16868 | op0 = mem_loc_descriptor (XEXP (rtl, 0), |
16869 | GET_MODE (XEXP (rtl, 0)) == VOIDmode |
16870 | ? word_mode : GET_MODE (XEXP (rtl, 0)), |
16871 | mem_mode, initialized: VAR_INIT_STATUS_INITIALIZED); |
16872 | op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, |
16873 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16874 | op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode, |
16875 | initialized: VAR_INIT_STATUS_INITIALIZED); |
16876 | if (op0 == NULL || op1 == NULL || op2 == NULL) |
16877 | break; |
16878 | |
16879 | mem_loc_result = op1; |
16880 | add_loc_descr (list_head: &mem_loc_result, descr: op2); |
16881 | add_loc_descr (list_head: &mem_loc_result, descr: op0); |
16882 | bra_node = new_loc_descr (op: DW_OP_bra, oprnd1: 0, oprnd2: 0); |
16883 | add_loc_descr (list_head: &mem_loc_result, descr: bra_node); |
16884 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0)); |
16885 | drop_node = new_loc_descr (op: DW_OP_drop, oprnd1: 0, oprnd2: 0); |
16886 | add_loc_descr (list_head: &mem_loc_result, descr: drop_node); |
16887 | bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc; |
16888 | bra_node->dw_loc_oprnd1.v.val_loc = drop_node; |
16889 | } |
16890 | break; |
16891 | |
16892 | case FLOAT_EXTEND: |
16893 | case FLOAT_TRUNCATE: |
16894 | case FLOAT: |
16895 | case UNSIGNED_FLOAT: |
16896 | case FIX: |
16897 | case UNSIGNED_FIX: |
16898 | if (!dwarf_strict || dwarf_version >= 5) |
16899 | { |
16900 | dw_die_ref type_die; |
16901 | dw_loc_descr_ref cvt; |
16902 | |
16903 | op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)), |
16904 | mem_mode, initialized: VAR_INIT_STATUS_INITIALIZED); |
16905 | if (op0 == NULL) |
16906 | break; |
16907 | if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), result: &int_mode) |
16908 | && (GET_CODE (rtl) == FLOAT |
16909 | || GET_MODE_SIZE (mode: int_mode) <= DWARF2_ADDR_SIZE)) |
16910 | { |
16911 | type_die = base_type_for_mode (mode: int_mode, |
16912 | GET_CODE (rtl) == UNSIGNED_FLOAT); |
16913 | if (type_die == NULL) |
16914 | break; |
16915 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
16916 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
16917 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
16918 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
16919 | add_loc_descr (list_head: &op0, descr: cvt); |
16920 | } |
16921 | type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX); |
16922 | if (type_die == NULL) |
16923 | break; |
16924 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
16925 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
16926 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
16927 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
16928 | add_loc_descr (list_head: &op0, descr: cvt); |
16929 | if (is_a <scalar_int_mode> (m: mode, result: &int_mode) |
16930 | && (GET_CODE (rtl) == FIX |
16931 | || GET_MODE_SIZE (mode: int_mode) < DWARF2_ADDR_SIZE)) |
16932 | { |
16933 | op0 = convert_descriptor_to_mode (mode: int_mode, op: op0); |
16934 | if (op0 == NULL) |
16935 | break; |
16936 | } |
16937 | mem_loc_result = op0; |
16938 | } |
16939 | break; |
16940 | |
16941 | case CLZ: |
16942 | case CTZ: |
16943 | case FFS: |
16944 | if (is_a <scalar_int_mode> (m: mode, result: &int_mode)) |
16945 | mem_loc_result = clz_loc_descriptor (rtl, mode: int_mode, mem_mode); |
16946 | break; |
16947 | |
16948 | case POPCOUNT: |
16949 | case PARITY: |
16950 | if (is_a <scalar_int_mode> (m: mode, result: &int_mode)) |
16951 | mem_loc_result = popcount_loc_descriptor (rtl, mode: int_mode, mem_mode); |
16952 | break; |
16953 | |
16954 | case BSWAP: |
16955 | if (is_a <scalar_int_mode> (m: mode, result: &int_mode)) |
16956 | mem_loc_result = bswap_loc_descriptor (rtl, mode: int_mode, mem_mode); |
16957 | break; |
16958 | |
16959 | case ROTATE: |
16960 | case ROTATERT: |
16961 | if (is_a <scalar_int_mode> (m: mode, result: &int_mode)) |
16962 | mem_loc_result = rotate_loc_descriptor (rtl, mode: int_mode, mem_mode); |
16963 | break; |
16964 | |
16965 | case COMPARE: |
16966 | /* In theory, we could implement the above. */ |
16967 | /* DWARF cannot represent the unsigned compare operations |
16968 | natively. */ |
16969 | case SS_MULT: |
16970 | case US_MULT: |
16971 | case SS_DIV: |
16972 | case US_DIV: |
16973 | case SS_PLUS: |
16974 | case US_PLUS: |
16975 | case SS_MINUS: |
16976 | case US_MINUS: |
16977 | case SS_NEG: |
16978 | case US_NEG: |
16979 | case SS_ABS: |
16980 | case SS_ASHIFT: |
16981 | case US_ASHIFT: |
16982 | case SS_TRUNCATE: |
16983 | case US_TRUNCATE: |
16984 | case UNORDERED: |
16985 | case ORDERED: |
16986 | case UNEQ: |
16987 | case UNGE: |
16988 | case UNGT: |
16989 | case UNLE: |
16990 | case UNLT: |
16991 | case LTGT: |
16992 | case FRACT_CONVERT: |
16993 | case UNSIGNED_FRACT_CONVERT: |
16994 | case SAT_FRACT: |
16995 | case UNSIGNED_SAT_FRACT: |
16996 | case SQRT: |
16997 | case ASM_OPERANDS: |
16998 | case VEC_MERGE: |
16999 | case VEC_SELECT: |
17000 | case VEC_CONCAT: |
17001 | case VEC_DUPLICATE: |
17002 | case VEC_SERIES: |
17003 | case HIGH: |
17004 | case FMA: |
17005 | case STRICT_LOW_PART: |
17006 | case CONST_VECTOR: |
17007 | case CONST_FIXED: |
17008 | case CLRSB: |
17009 | case CLOBBER: |
17010 | case SMUL_HIGHPART: |
17011 | case UMUL_HIGHPART: |
17012 | case BITREVERSE: |
17013 | case COPYSIGN: |
17014 | break; |
17015 | |
17016 | case CONST_STRING: |
17017 | resolve_one_addr (&rtl); |
17018 | goto symref; |
17019 | |
17020 | /* RTL sequences inside PARALLEL record a series of DWARF operations for |
17021 | the expression. An UNSPEC rtx represents a raw DWARF operation, |
17022 | new_loc_descr is called for it to build the operation directly. |
17023 | Otherwise mem_loc_descriptor is called recursively. */ |
17024 | case PARALLEL: |
17025 | { |
17026 | int index = 0; |
17027 | dw_loc_descr_ref exp_result = NULL; |
17028 | |
17029 | for (; index < XVECLEN (rtl, 0); index++) |
17030 | { |
17031 | rtx elem = XVECEXP (rtl, 0, index); |
17032 | if (GET_CODE (elem) == UNSPEC) |
17033 | { |
17034 | /* Each DWARF operation UNSPEC contain two operands, if |
17035 | one operand is not used for the operation, const0_rtx is |
17036 | passed. */ |
17037 | gcc_assert (XVECLEN (elem, 0) == 2); |
17038 | |
17039 | HOST_WIDE_INT dw_op = XINT (elem, 1); |
17040 | HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0)); |
17041 | HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1)); |
17042 | exp_result |
17043 | = new_loc_descr (op: (enum dwarf_location_atom) dw_op, oprnd1, |
17044 | oprnd2); |
17045 | } |
17046 | else |
17047 | exp_result |
17048 | = mem_loc_descriptor (rtl: elem, mode, mem_mode, |
17049 | initialized: VAR_INIT_STATUS_INITIALIZED); |
17050 | |
17051 | if (!mem_loc_result) |
17052 | mem_loc_result = exp_result; |
17053 | else |
17054 | add_loc_descr (list_head: &mem_loc_result, descr: exp_result); |
17055 | } |
17056 | |
17057 | break; |
17058 | } |
17059 | |
17060 | default: |
17061 | if (flag_checking) |
17062 | { |
17063 | print_rtl (stderr, rtl); |
17064 | gcc_unreachable (); |
17065 | } |
17066 | break; |
17067 | } |
17068 | |
17069 | if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED) |
17070 | add_loc_descr (list_head: &mem_loc_result, descr: new_loc_descr (op: DW_OP_GNU_uninit, oprnd1: 0, oprnd2: 0)); |
17071 | |
17072 | return mem_loc_result; |
17073 | } |
17074 | |
17075 | /* Return a descriptor that describes the concatenation of two locations. |
17076 | This is typically a complex variable. */ |
17077 | |
17078 | static dw_loc_descr_ref |
17079 | concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized) |
17080 | { |
17081 | /* At present we only track constant-sized pieces. */ |
17082 | unsigned int size0, size1; |
17083 | if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (const_value: &size0) |
17084 | || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (const_value: &size1)) |
17085 | return 0; |
17086 | |
17087 | dw_loc_descr_ref cc_loc_result = NULL; |
17088 | dw_loc_descr_ref x0_ref |
17089 | = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED); |
17090 | dw_loc_descr_ref x1_ref |
17091 | = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED); |
17092 | |
17093 | if (x0_ref == 0 || x1_ref == 0) |
17094 | return 0; |
17095 | |
17096 | cc_loc_result = x0_ref; |
17097 | add_loc_descr_op_piece (list_head: &cc_loc_result, size: size0); |
17098 | |
17099 | add_loc_descr (list_head: &cc_loc_result, descr: x1_ref); |
17100 | add_loc_descr_op_piece (list_head: &cc_loc_result, size: size1); |
17101 | |
17102 | if (initialized == VAR_INIT_STATUS_UNINITIALIZED) |
17103 | add_loc_descr (list_head: &cc_loc_result, descr: new_loc_descr (op: DW_OP_GNU_uninit, oprnd1: 0, oprnd2: 0)); |
17104 | |
17105 | return cc_loc_result; |
17106 | } |
17107 | |
17108 | /* Return a descriptor that describes the concatenation of N |
17109 | locations. */ |
17110 | |
17111 | static dw_loc_descr_ref |
17112 | concatn_loc_descriptor (rtx concatn, enum var_init_status initialized) |
17113 | { |
17114 | unsigned int i; |
17115 | dw_loc_descr_ref cc_loc_result = NULL; |
17116 | unsigned int n = XVECLEN (concatn, 0); |
17117 | unsigned int size; |
17118 | |
17119 | for (i = 0; i < n; ++i) |
17120 | { |
17121 | dw_loc_descr_ref ref; |
17122 | rtx x = XVECEXP (concatn, 0, i); |
17123 | |
17124 | /* At present we only track constant-sized pieces. */ |
17125 | if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (const_value: &size)) |
17126 | return NULL; |
17127 | |
17128 | ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED); |
17129 | if (ref == NULL) |
17130 | return NULL; |
17131 | |
17132 | add_loc_descr (list_head: &cc_loc_result, descr: ref); |
17133 | add_loc_descr_op_piece (list_head: &cc_loc_result, size); |
17134 | } |
17135 | |
17136 | if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED) |
17137 | add_loc_descr (list_head: &cc_loc_result, descr: new_loc_descr (op: DW_OP_GNU_uninit, oprnd1: 0, oprnd2: 0)); |
17138 | |
17139 | return cc_loc_result; |
17140 | } |
17141 | |
17142 | /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer |
17143 | for DEBUG_IMPLICIT_PTR RTL. */ |
17144 | |
17145 | static dw_loc_descr_ref |
17146 | implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset) |
17147 | { |
17148 | dw_loc_descr_ref ret; |
17149 | dw_die_ref ref; |
17150 | |
17151 | if (dwarf_strict && dwarf_version < 5) |
17152 | return NULL; |
17153 | gcc_assert (VAR_P (DEBUG_IMPLICIT_PTR_DECL (rtl)) |
17154 | || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL |
17155 | || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL); |
17156 | ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl)); |
17157 | ret = new_loc_descr (op: dwarf_OP (op: DW_OP_implicit_pointer), oprnd1: 0, oprnd2: offset); |
17158 | ret->dw_loc_oprnd2.val_class = dw_val_class_const; |
17159 | if (ref) |
17160 | { |
17161 | ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
17162 | ret->dw_loc_oprnd1.v.val_die_ref.die = ref; |
17163 | ret->dw_loc_oprnd1.v.val_die_ref.external = 0; |
17164 | } |
17165 | else |
17166 | { |
17167 | ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref; |
17168 | ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl); |
17169 | } |
17170 | return ret; |
17171 | } |
17172 | |
17173 | /* Output a proper Dwarf location descriptor for a variable or parameter |
17174 | which is either allocated in a register or in a memory location. For a |
17175 | register, we just generate an OP_REG and the register number. For a |
17176 | memory location we provide a Dwarf postfix expression describing how to |
17177 | generate the (dynamic) address of the object onto the address stack. |
17178 | |
17179 | MODE is mode of the decl if this loc_descriptor is going to be used in |
17180 | .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are |
17181 | allowed, VOIDmode otherwise. |
17182 | |
17183 | If we don't know how to describe it, return 0. */ |
17184 | |
static dw_loc_descr_ref
loc_descriptor (rtx rtl, machine_mode mode,
		enum var_init_status initialized)
{
  dw_loc_descr_ref loc_result = NULL;
  scalar_int_mode int_mode;

  switch (GET_CODE (rtl))
    {
    case SUBREG:
      /* The case of a subreg may arise when we have a local (register)
	 variable or a formal (register) parameter which doesn't quite fill
	 up an entire register.  For now, just assume that it is
	 legitimate to make the Dwarf info refer to the whole register which
	 contains the given subreg.  */
      if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
	loc_result = loc_descriptor (SUBREG_REG (rtl),
				     GET_MODE (SUBREG_REG (rtl)), initialized);
      else
	goto do_default;
      break;

    case REG:
      loc_result = reg_loc_descriptor (rtl, initialized);
      break;

    case MEM:
      /* Describe the memory location by an expression computing its
	 address.  */
      loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode: get_address_mode (mem: rtl),
				       GET_MODE (rtl), initialized);
      /* If the address can't be described, try a TLS-specific
	 descriptor.  */
      if (loc_result == NULL)
	loc_result = tls_mem_loc_descriptor (mem: rtl);
      /* As a last resort, look through constant-pool references and
	 describe the constant itself.  */
      if (loc_result == NULL)
	{
	  rtx new_rtl = avoid_constant_pool_reference (rtl);
	  if (new_rtl != rtl)
	    loc_result = loc_descriptor (rtl: new_rtl, mode, initialized);
	}
      break;

    case CONCAT:
      loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
					  initialized);
      break;

    case CONCATN:
      loc_result = concatn_loc_descriptor (concatn: rtl, initialized);
      break;

    case VAR_LOCATION:
      /* Single part.  */
      if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
	{
	  rtx loc = PAT_VAR_LOCATION_LOC (rtl);
	  if (GET_CODE (loc) == EXPR_LIST)
	    loc = XEXP (loc, 0);
	  loc_result = loc_descriptor (rtl: loc, mode, initialized);
	  break;
	}

      /* Multiple parts: describe the PARALLEL, one piece per part.  */
      rtl = XEXP (rtl, 1);
      /* FALLTHRU */

    case PARALLEL:
      {
	rtvec par_elems = XVEC (rtl, 0);
	int num_elem = GET_NUM_ELEM (par_elems);
	machine_mode mode;
	int i, size;

	/* Create the first one, so we have something to add to.  */
	loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
				     VOIDmode, initialized);
	if (loc_result == NULL)
	  return NULL;
	mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
	/* At present we only track constant-sized pieces.  */
	if (!GET_MODE_SIZE (mode).is_constant (const_value: &size))
	  return NULL;
	add_loc_descr_op_piece (list_head: &loc_result, size);
	for (i = 1; i < num_elem; i++)
	  {
	    dw_loc_descr_ref temp;

	    temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
				   VOIDmode, initialized);
	    if (temp == NULL)
	      return NULL;
	    add_loc_descr (list_head: &loc_result, descr: temp);
	    mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
	    /* At present we only track constant-sized pieces.  */
	    if (!GET_MODE_SIZE (mode).is_constant (const_value: &size))
	      return NULL;
	    add_loc_descr_op_piece (list_head: &loc_result, size);
	  }
      }
      break;

    case CONST_INT:
      /* A constant can only be described as a value; that needs a
	 known scalar mode to give the value a size.  */
      if (mode != VOIDmode && mode != BLKmode)
	{
	  int_mode = as_a <scalar_int_mode> (m: mode);
	  loc_result = address_of_int_loc_descriptor (size: GET_MODE_SIZE (mode: int_mode),
						      INTVAL (rtl));
	}
      break;

    case CONST_DOUBLE:
      if (mode == VOIDmode)
	mode = GET_MODE (rtl);

      /* DW_OP_implicit_value is a DWARF 4 addition, also usable when
	 strictness is relaxed.  */
      if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
	{
	  gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));

	  /* Note that a CONST_DOUBLE rtx could represent either an integer
	     or a floating-point constant.  A CONST_DOUBLE is used whenever
	     the constant requires more than one word in order to be
	     adequately represented.  We output CONST_DOUBLEs as blocks.  */
	  scalar_mode smode = as_a <scalar_mode> (m: mode);
	  loc_result = new_loc_descr (op: DW_OP_implicit_value,
				      oprnd1: GET_MODE_SIZE (mode: smode), oprnd2: 0);
#if TARGET_SUPPORTS_WIDE_INT == 0
	  if (!SCALAR_FLOAT_MODE_P (smode))
	    {
	      loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
	      loc_result->dw_loc_oprnd2.v.val_double
	        = rtx_to_double_int (rtl);
	    }
	  else
#endif
	    {
	      /* Emit the constant as a byte blob.  */
	      unsigned int length = GET_MODE_SIZE (mode: smode);
	      unsigned char *array = ggc_vec_alloc<unsigned char> (c: length);
	      unsigned int elt_size = insert_float (rtl, array);

	      loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
	      loc_result->dw_loc_oprnd2.v.val_vec.length = length / elt_size;
	      loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
	      loc_result->dw_loc_oprnd2.v.val_vec.array = array;
	    }
	}
      break;

    case CONST_WIDE_INT:
      if (mode == VOIDmode)
	mode = GET_MODE (rtl);

      if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
	{
	  int_mode = as_a <scalar_int_mode> (m: mode);
	  loc_result = new_loc_descr (op: DW_OP_implicit_value,
				      oprnd1: GET_MODE_SIZE (mode: int_mode), oprnd2: 0);
	  loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
	  loc_result->dw_loc_oprnd2.v.val_wide
	    = alloc_dw_wide_int (w: rtx_mode_t (rtl, int_mode));
	}
      break;

    case CONST_VECTOR:
      if (mode == VOIDmode)
	mode = GET_MODE (rtl);

      if (mode != VOIDmode
	  /* The combination of a length and byte elt_size doesn't extend
	     naturally to boolean vectors, where several elements are packed
	     into the same byte.  */
	  && GET_MODE_CLASS (mode) != MODE_VECTOR_BOOL
	  && (dwarf_version >= 4 || !dwarf_strict))
	{
	  /* Only constant-length vectors can be emitted as a blob.  */
	  unsigned int length;
	  if (!CONST_VECTOR_NUNITS (rtl).is_constant (const_value: &length))
	    return NULL;

	  unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
	  unsigned char *array
	    = ggc_vec_alloc<unsigned char> (c: length * elt_size);
	  unsigned int i;
	  unsigned char *p;
	  machine_mode imode = GET_MODE_INNER (mode);

	  gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
	  switch (GET_MODE_CLASS (mode))
	    {
	    case MODE_VECTOR_INT:
	      for (i = 0, p = array; i < length; i++, p += elt_size)
		{
		  rtx elt = CONST_VECTOR_ELT (rtl, i);
		  insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
		}
	      break;

	    case MODE_VECTOR_FLOAT:
	      for (i = 0, p = array; i < length; i++, p += elt_size)
		{
		  rtx elt = CONST_VECTOR_ELT (rtl, i);
		  insert_float (elt, p);
		}
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  loc_result = new_loc_descr (op: DW_OP_implicit_value,
				      oprnd1: length * elt_size, oprnd2: 0);
	  loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
	  loc_result->dw_loc_oprnd2.v.val_vec.length = length;
	  loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
	  loc_result->dw_loc_oprnd2.v.val_vec.array = array;
	}
      break;

    case CONST:
      /* Wrapped constants are described as the constant itself;
	 anything else falls through to the symbolic-address cases.  */
      if (mode == VOIDmode
	  || CONST_SCALAR_INT_P (XEXP (rtl, 0))
	  || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
	  || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
	{
	  loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
	  break;
	}
      /* FALLTHROUGH */
    case SYMBOL_REF:
      if (!const_ok_for_output (rtl))
	break;
      /* FALLTHROUGH */
    case LABEL_REF:
      /* An address-sized symbolic value: DW_OP_addr followed by
	 DW_OP_stack_value (DWARF 4 or relaxed strictness).  */
      if (is_a <scalar_int_mode> (m: mode, result: &int_mode)
	  && GET_MODE_SIZE (mode: int_mode) == DWARF2_ADDR_SIZE
	  && (dwarf_version >= 4 || !dwarf_strict))
	{
	  loc_result = new_addr_loc_descr (addr: rtl, dtprel: dtprel_false);
	  add_loc_descr (list_head: &loc_result, descr: new_loc_descr (op: DW_OP_stack_value, oprnd1: 0, oprnd2: 0));
	  vec_safe_push (v&: used_rtx_array, obj: rtl);
	}
      break;

    case DEBUG_IMPLICIT_PTR:
      loc_result = implicit_ptr_descriptor (rtl, offset: 0);
      break;

    case PLUS:
      /* An implicit pointer plus a constant offset.  */
      if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
	  && CONST_INT_P (XEXP (rtl, 1)))
	{
	  loc_result
	    = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
	  break;
	}
      /* FALLTHRU */
    do_default:
    default:
      /* Anything else is emitted as a value expression computed by
	 mem_loc_descriptor, terminated with DW_OP_stack_value.  */
      if ((is_a <scalar_int_mode> (m: mode, result: &int_mode)
	   && GET_MODE (rtl) == int_mode
	   && GET_MODE_SIZE (mode: int_mode) <= DWARF2_ADDR_SIZE
	   && dwarf_version >= 4)
	  || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
	{
	  /* Value expression.  */
	  loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
	  if (loc_result)
	    add_loc_descr (list_head: &loc_result,
			   descr: new_loc_descr (op: DW_OP_stack_value, oprnd1: 0, oprnd2: 0));
	}
      break;
    }

  return loc_result;
}
17454 | |
17455 | /* We need to figure out what section we should use as the base for the |
17456 | address ranges where a given location is valid. |
17457 | 1. If this particular DECL has a section associated with it, use that. |
17458 | 2. If this function has a section associated with it, use that. |
17459 | 3. Otherwise, use the text section. |
17460 | XXX: If you split a variable across multiple sections, we won't notice. */ |
17461 | |
17462 | static const char * |
17463 | secname_for_decl (const_tree decl) |
17464 | { |
17465 | const char *secname; |
17466 | |
17467 | if (VAR_OR_FUNCTION_DECL_P (decl) |
17468 | && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl)) |
17469 | && DECL_SECTION_NAME (decl)) |
17470 | secname = DECL_SECTION_NAME (decl); |
17471 | else if (current_function_decl && DECL_SECTION_NAME (current_function_decl)) |
17472 | { |
17473 | if (in_cold_section_p) |
17474 | { |
17475 | section *sec = current_function_section (); |
17476 | if (sec->common.flags & SECTION_NAMED) |
17477 | return sec->named.name; |
17478 | } |
17479 | secname = DECL_SECTION_NAME (current_function_decl); |
17480 | } |
17481 | else if (cfun && in_cold_section_p) |
17482 | secname = crtl->subsections.cold_section_label; |
17483 | else |
17484 | secname = text_section_label; |
17485 | |
17486 | return secname; |
17487 | } |
17488 | |
17489 | /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */ |
17490 | |
17491 | static bool |
17492 | decl_by_reference_p (tree decl) |
17493 | { |
17494 | return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL |
17495 | || VAR_P (decl)) |
17496 | && DECL_BY_REFERENCE (decl)); |
17497 | } |
17498 | |
/* Helper function for dw_loc_list.  Compute proper Dwarf location descriptor
   for VARLOC.  WANT_ADDRESS == 2 requests the object's location
   expression (built via loc_descriptor, pieces allowed); otherwise a
   value expression is built via mem_loc_descriptor and, if an address
   is wanted (WANT_ADDRESS != 0) but only a value was produced, the
   request fails.  Returns NULL on failure.  */

static dw_loc_descr_ref
dw_loc_list_1 (tree loc, rtx varloc, int want_address,
	       enum var_init_status initialized)
{
  /* Set when DESCR computes the object's address rather than its
     value.  */
  int have_address = 0;
  dw_loc_descr_ref descr;
  machine_mode mode;

  if (want_address != 2)
    {
      gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
      /* Single part.  */
      if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
	{
	  varloc = PAT_VAR_LOCATION_LOC (varloc);
	  if (GET_CODE (varloc) == EXPR_LIST)
	    varloc = XEXP (varloc, 0);
	  mode = GET_MODE (varloc);
	  if (MEM_P (varloc))
	    {
	      /* For memory, describing the address gives us an
		 address expression directly.  */
	      rtx addr = XEXP (varloc, 0);
	      descr = mem_loc_descriptor (rtl: addr, mode: get_address_mode (mem: varloc),
					  mem_mode: mode, initialized);
	      if (descr)
		have_address = 1;
	      else
		{
		  /* Otherwise look through constant-pool references
		     and describe the constant as a value.  */
		  rtx x = avoid_constant_pool_reference (varloc);
		  if (x != varloc)
		    descr = mem_loc_descriptor (rtl: x, mode, VOIDmode,
						initialized);
		}
	    }
	  else
	    descr = mem_loc_descriptor (rtl: varloc, mode, VOIDmode, initialized);
	}
      else
	return 0;
    }
  else
    {
      /* WANT_ADDRESS == 2: build a full location expression in the
	 decl's mode; pieces and register locations are allowed.  */
      if (GET_CODE (varloc) == VAR_LOCATION)
	mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
      else
	mode = DECL_MODE (loc);
      descr = loc_descriptor (rtl: varloc, mode, initialized);
      have_address = 1;
    }

  if (!descr)
    return 0;

  /* A value can still serve as a location by appending
     DW_OP_stack_value (DWARF 4, or relaxed strictness), provided the
     object fits in an address-sized value.  */
  if (want_address == 2 && !have_address
      && (dwarf_version >= 4 || !dwarf_strict))
    {
      if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
	{
	  expansion_failed (expr: loc, NULL_RTX,
			    reason: "DWARF address size mismatch");
	  return 0;
	}
      add_loc_descr (list_head: &descr, descr: new_loc_descr (op: DW_OP_stack_value, oprnd1: 0, oprnd2: 0));
      have_address = 1;
    }
  /* Show if we can't fill the request for an address.  */
  if (want_address && !have_address)
    {
      expansion_failed (expr: loc, NULL_RTX,
			reason: "Want address and only have value");
      return 0;
    }

  /* If we've got an address and don't want one, dereference.  */
  if (!want_address && have_address)
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
      enum dwarf_location_atom op;

      if (size > DWARF2_ADDR_SIZE || size == -1)
	{
	  expansion_failed (expr: loc, NULL_RTX,
			    reason: "DWARF address size mismatch");
	  return 0;
	}
      else if (size == DWARF2_ADDR_SIZE)
	op = DW_OP_deref;
      else
	op = DW_OP_deref_size;

      add_loc_descr (list_head: &descr, descr: new_loc_descr (op, oprnd1: size, oprnd2: 0));
    }

  return descr;
}
17596 | |
17597 | /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL |
17598 | if it is not possible. */ |
17599 | |
17600 | static dw_loc_descr_ref |
17601 | new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset) |
17602 | { |
17603 | if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0) |
17604 | return new_loc_descr (op: DW_OP_piece, oprnd1: bitsize / BITS_PER_UNIT, oprnd2: 0); |
17605 | else if (dwarf_version >= 3 || !dwarf_strict) |
17606 | return new_loc_descr (op: DW_OP_bit_piece, oprnd1: bitsize, oprnd2: offset); |
17607 | else |
17608 | return NULL; |
17609 | } |
17610 | |
/* Helper function for dw_loc_list.  Compute proper Dwarf location descriptor
   for VAR_LOC_NOTE for variable DECL that has been optimized by SRA.
   LOC is a chain of EXPR_LIST pieces; each piece covers some bits of
   DECL (see decl_piece_bitsize).  Pieces without a known location
   become padding: an empty expression followed by DW_OP_*piece,
   meaning those bits are optimized out.  Returns NULL if the whole
   decl cannot be represented.  */

static dw_loc_descr_ref
dw_sra_loc_expr (tree decl, rtx loc)
{
  rtx p;
  /* Accumulated bits of consecutive unrepresentable pieces, flushed
     as a single empty DW_OP_*piece before the next real piece.  */
  unsigned HOST_WIDE_INT padsize = 0;
  dw_loc_descr_ref descr, *descr_tail;
  /* Bits of DECL not yet covered by emitted pieces.  */
  unsigned HOST_WIDE_INT decl_size;
  rtx varloc;
  enum var_init_status initialized;

  if (DECL_SIZE (decl) == NULL
      || !tree_fits_uhwi_p (DECL_SIZE (decl)))
    return NULL;

  decl_size = tree_to_uhwi (DECL_SIZE (decl));
  descr = NULL;
  descr_tail = &descr;

  for (p = loc; p; p = XEXP (p, 1))
    {
      unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (piece: p);
      rtx loc_note = *decl_piece_varloc_ptr (piece: p);
      dw_loc_descr_ref cur_descr;
      dw_loc_descr_ref *tail, last = NULL;
      /* Total bits claimed by DW_OP_*piece ops inside CUR_DESCR.  */
      unsigned HOST_WIDE_INT opsize = 0;

      if (loc_note == NULL_RTX
	  || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
	{
	  /* This piece has no location; treat it as padding.  */
	  padsize += bitsize;
	  continue;
	}
      initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
      varloc = NOTE_VAR_LOCATION (loc_note);
      cur_descr = dw_loc_list_1 (loc: decl, varloc, want_address: 2, initialized);
      if (cur_descr == NULL)
	{
	  padsize += bitsize;
	  continue;
	}

      /* Check that cur_descr either doesn't use
	 DW_OP_*piece operations, or their sum is equal
	 to bitsize.  Otherwise we can't embed it.  */
      for (tail = &cur_descr; *tail != NULL;
	   tail = &(*tail)->dw_loc_next)
	if ((*tail)->dw_loc_opc == DW_OP_piece)
	  {
	    opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
		      * BITS_PER_UNIT;
	    last = *tail;
	  }
	else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
	  {
	    opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
	    last = *tail;
	  }

      if (last != NULL && opsize != bitsize)
	{
	  padsize += bitsize;
	  /* Discard the current piece of the descriptor and release any
	     addr_table entries it uses.  */
	  remove_loc_list_addr_table_entries (descr: cur_descr);
	  continue;
	}

      /* If there is a hole, add DW_OP_*piece after empty DWARF
	 expression, which means that those bits are optimized out.  */
      if (padsize)
	{
	  if (padsize > decl_size)
	    {
	      remove_loc_list_addr_table_entries (descr: cur_descr);
	      goto discard_descr;
	    }
	  decl_size -= padsize;
	  *descr_tail = new_loc_descr_op_bit_piece (bitsize: padsize, offset: 0);
	  if (*descr_tail == NULL)
	    {
	      remove_loc_list_addr_table_entries (descr: cur_descr);
	      goto discard_descr;
	    }
	  descr_tail = &(*descr_tail)->dw_loc_next;
	  padsize = 0;
	}
      /* Splice CUR_DESCR onto the end of the result chain.  */
      *descr_tail = cur_descr;
      descr_tail = tail;
      if (bitsize > decl_size)
	goto discard_descr;
      decl_size -= bitsize;
      /* If CUR_DESCR contained no piece ops of its own, append one
	 covering this piece.  */
      if (last == NULL)
	{
	  HOST_WIDE_INT offset = 0;
	  if (GET_CODE (varloc) == VAR_LOCATION
	      && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
	    {
	      varloc = PAT_VAR_LOCATION_LOC (varloc);
	      if (GET_CODE (varloc) == EXPR_LIST)
		varloc = XEXP (varloc, 0);
	    }
	  /* Strip wrappers to see whether the underlying location is
	     a MEM.  */
	  do
	    {
	      if (GET_CODE (varloc) == CONST
		  || GET_CODE (varloc) == SIGN_EXTEND
		  || GET_CODE (varloc) == ZERO_EXTEND)
		varloc = XEXP (varloc, 0);
	      else if (GET_CODE (varloc) == SUBREG)
		varloc = SUBREG_REG (varloc);
	      else
		break;
	    }
	  while (1);
	  /* DW_OP_bit_size offset should be zero for register
	     or implicit location descriptions and empty location
	     descriptions, but for memory addresses needs big endian
	     adjustment.  */
	  if (MEM_P (varloc))
	    {
	      unsigned HOST_WIDE_INT memsize;
	      if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (const_value: &memsize))
		goto discard_descr;
	      memsize *= BITS_PER_UNIT;
	      if (memsize != bitsize)
		{
		  if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
		      && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
		    goto discard_descr;
		  if (memsize < bitsize)
		    goto discard_descr;
		  if (BITS_BIG_ENDIAN)
		    offset = memsize - bitsize;
		}
	    }

	  *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
	  if (*descr_tail == NULL)
	    goto discard_descr;
	  descr_tail = &(*descr_tail)->dw_loc_next;
	}
    }

  /* If there were any non-empty expressions, add padding till the end of
     the decl.  */
  if (descr != NULL && decl_size != 0)
    {
      *descr_tail = new_loc_descr_op_bit_piece (bitsize: decl_size, offset: 0);
      if (*descr_tail == NULL)
	goto discard_descr;
    }
  return descr;

discard_descr:
  /* Discard the descriptor and release any addr_table entries it uses.  */
  remove_loc_list_addr_table_entries (descr);
  return NULL;
}
17771 | |
17772 | /* Return the dwarf representation of the location list LOC_LIST of |
17773 | DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree |
17774 | function. */ |
17775 | |
static dw_loc_list_ref
dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
{
  const char *endname, *secname;
  var_loc_view endview;
  rtx varloc;
  enum var_init_status initialized;
  struct var_loc_node *node;
  dw_loc_descr_ref descr;
  char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
  dw_loc_list_ref list = NULL;
  /* Tail pointer so new entries are appended in order.  */
  dw_loc_list_ref *listp = &list;

  /* Now that we know what section we are using for a base,
     actually construct the list of locations.
     The first location information is what is passed to the
     function that creates the location list, and the remaining
     locations just get added on to that list.
     Note that we only know the start address for a location
     (IE location changes), so to build the range, we use
     the range [current location start, next location start].
     This means we have to special case the last node, and generate
     a range of [last location start, end of function label].  */

  if (cfun && crtl->has_bb_partition)
    {
      /* With hot/cold partitioning, temporarily pretend to be in the
	 partition the first range starts in so secname_for_decl picks
	 the right base section.  */
      bool save_in_cold_section_p = in_cold_section_p;
      in_cold_section_p = first_function_block_is_cold;
      if (loc_list->last_before_switch == NULL)
	in_cold_section_p = !in_cold_section_p;
      secname = secname_for_decl (decl);
      in_cold_section_p = save_in_cold_section_p;
    }
  else
    secname = secname_for_decl (decl);

  for (node = loc_list->first; node; node = node->next)
    {
      bool range_across_switch = false;
      if (GET_CODE (node->loc) == EXPR_LIST
	  || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
	{
	  if (GET_CODE (node->loc) == EXPR_LIST)
	    {
	      descr = NULL;
	      /* This requires DW_OP_{,bit_}piece, which is not usable
		 inside DWARF expressions.  */
	      if (want_address == 2)
		descr = dw_sra_loc_expr (decl, loc: node->loc);
	    }
	  else
	    {
	      initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
	      varloc = NOTE_VAR_LOCATION (node->loc);
	      descr = dw_loc_list_1 (loc: decl, varloc, want_address, initialized);
	    }
	  if (descr)
	    {
	      /* If section switch happens in between node->label
		 and node->next->label (or end of function) and
		 we can't emit it as a single entry list,
		 emit two ranges, first one ending at the end
		 of first partition and second one starting at the
		 beginning of second partition.  */
	      if (node == loc_list->last_before_switch
		  && (node != loc_list->first || loc_list->first->next
		      /* If we are to emit a view number, we will emit
			 a loclist rather than a single location
			 expression for the entire function (see
			 loc_list_has_views), so we have to split the
			 range that straddles across partitions.  */
		      || !ZERO_VIEW_P (node->view))
		  && current_function_decl)
		{
		  endname = cfun->fde->dw_fde_end;
		  endview = 0;
		  range_across_switch = true;
		}
	      /* The variable has a location between NODE->LABEL and
		 NODE->NEXT->LABEL.  */
	      else if (node->next)
		endname = node->next->label, endview = node->next->view;
	      /* If the variable has a location at the last label
		 it keeps its location until the end of function.  */
	      else if (!current_function_decl)
		endname = text_end_label, endview = 0;
	      else
		{
		  /* Generate (and intern) the function-end label as
		     the end of the final range.  */
		  ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
					       current_function_funcdef_no);
		  endname = ggc_strdup (label_id);
		  endview = 0;
		}

	      *listp = new_loc_list (expr: descr, begin: node->label, vbegin: node->view,
				     end: endname, vend: endview, section: secname);
	      if (TREE_CODE (decl) == PARM_DECL
		  && node == loc_list->first
		  && NOTE_P (node->loc)
		  && strcmp (s1: node->label, s2: endname) == 0)
		(*listp)->force = true;
	      listp = &(*listp)->dw_loc_next;
	    }
	}

      /* Once past the partition switch point, recompute the base
	 section name for the other partition.  */
      if (cfun
	  && crtl->has_bb_partition
	  && node == loc_list->last_before_switch)
	{
	  bool save_in_cold_section_p = in_cold_section_p;
	  in_cold_section_p = !first_function_block_is_cold;
	  secname = secname_for_decl (decl);
	  in_cold_section_p = save_in_cold_section_p;
	}

      if (range_across_switch)
	{
	  /* Emit the second half of the split range, starting at the
	     beginning of the second partition.  */
	  if (GET_CODE (node->loc) == EXPR_LIST)
	    descr = dw_sra_loc_expr (decl, loc: node->loc);
	  else
	    {
	      initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
	      varloc = NOTE_VAR_LOCATION (node->loc);
	      descr = dw_loc_list_1 (loc: decl, varloc, want_address,
				     initialized);
	    }
	  gcc_assert (descr);
	  /* The variable has a location between NODE->LABEL and
	     NODE->NEXT->LABEL.  */
	  if (node->next)
	    endname = node->next->label, endview = node->next->view;
	  else
	    endname = cfun->fde->dw_fde_second_end, endview = 0;
	  *listp = new_loc_list (expr: descr, cfun->fde->dw_fde_second_begin, vbegin: 0,
				 end: endname, vend: endview, section: secname);
	  listp = &(*listp)->dw_loc_next;
	}
    }

  /* Try to avoid the overhead of a location list emitting a location
     expression instead, but only if we didn't have more than one
     location entry in the first place.  If some entries were not
     representable, we don't want to pretend a single entry that was
     applies to the entire scope in which the variable is
     available.  */
  if (list && loc_list->first->next)
    gen_llsym (list);
  else
    maybe_gen_llsym (list);

  return list;
}
17928 | |
17929 | /* Return true if the loc_list has only single element and thus |
17930 | can be represented as location description. */ |
17931 | |
17932 | static bool |
17933 | single_element_loc_list_p (dw_loc_list_ref list) |
17934 | { |
17935 | gcc_assert (!list->dw_loc_next || list->ll_symbol); |
17936 | return !list->ll_symbol; |
17937 | } |
17938 | |
17939 | /* Duplicate a single element of location list. */ |
17940 | |
17941 | static inline dw_loc_descr_ref |
17942 | copy_loc_descr (dw_loc_descr_ref ref) |
17943 | { |
17944 | dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> (); |
17945 | memcpy (dest: copy, src: ref, n: sizeof (dw_loc_descr_node)); |
17946 | return copy; |
17947 | } |
17948 | |
17949 | /* To each location in list LIST append loc descr REF. */ |
17950 | |
17951 | static void |
17952 | add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref) |
17953 | { |
17954 | dw_loc_descr_ref copy; |
17955 | add_loc_descr (list_head: &list->expr, descr: ref); |
17956 | list = list->dw_loc_next; |
17957 | while (list) |
17958 | { |
17959 | copy = copy_loc_descr (ref); |
17960 | add_loc_descr (list_head: &list->expr, descr: copy); |
17961 | while (copy->dw_loc_next) |
17962 | copy = copy->dw_loc_next = copy_loc_descr (ref: copy->dw_loc_next); |
17963 | list = list->dw_loc_next; |
17964 | } |
17965 | } |
17966 | |
/* To each location in list LIST prepend loc descr REF.  The first list
   entry reuses REF's chain directly; every later entry receives a fresh
   copy of that chain so no descriptor nodes are shared.  */

static void
prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
{
  dw_loc_descr_ref copy;
  /* Original head of the first entry's expression; after REF is chained in
     front of it, it marks where the shared prefix ends when cloning.  */
  dw_loc_descr_ref ref_end = list->expr;
  /* Splice the first entry's expression after REF, then make REF the new
     head of that entry.  */
  add_loc_descr (list_head: &ref, descr: list->expr);
  list->expr = ref;
  list = list->dw_loc_next;
  while (list)
    {
      dw_loc_descr_ref end = list->expr;
      /* Clone the prefix node by node (REF up to, but excluding, REF_END),
	 then attach this entry's original expression at the tail.  */
      list->expr = copy = copy_loc_descr (ref);
      while (copy->dw_loc_next != ref_end)
	copy = copy->dw_loc_next = copy_loc_descr (ref: copy->dw_loc_next);
      copy->dw_loc_next = end;
      list = list->dw_loc_next;
    }
}
17987 | |
17988 | /* Given two lists RET and LIST |
17989 | produce location list that is result of adding expression in LIST |
17990 | to expression in RET on each position in program. |
17991 | Might be destructive on both RET and LIST. |
17992 | |
17993 | TODO: We handle only simple cases of RET or LIST having at most one |
17994 | element. General case would involve sorting the lists in program order |
17995 | and merging them that will need some additional work. |
17996 | Adding that will improve quality of debug info especially for SRA-ed |
17997 | structures. */ |
17998 | |
17999 | static void |
18000 | add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list) |
18001 | { |
18002 | if (!list) |
18003 | return; |
18004 | if (!*ret) |
18005 | { |
18006 | *ret = list; |
18007 | return; |
18008 | } |
18009 | if (!list->dw_loc_next) |
18010 | { |
18011 | add_loc_descr_to_each (list: *ret, ref: list->expr); |
18012 | return; |
18013 | } |
18014 | if (!(*ret)->dw_loc_next) |
18015 | { |
18016 | prepend_loc_descr_to_each (list, ref: (*ret)->expr); |
18017 | *ret = list; |
18018 | return; |
18019 | } |
18020 | expansion_failed (NULL_TREE, NULL_RTX, |
18021 | reason: "Don't know how to merge two non-trivial" |
18022 | " location lists.\n" ); |
18023 | *ret = NULL; |
18024 | return; |
18025 | } |
18026 | |
18027 | /* LOC is constant expression. Try a luck, look it up in constant |
18028 | pool and return its loc_descr of its address. */ |
18029 | |
18030 | static dw_loc_descr_ref |
18031 | cst_pool_loc_descr (tree loc) |
18032 | { |
18033 | /* Get an RTL for this, if something has been emitted. */ |
18034 | rtx rtl = lookup_constant_def (loc); |
18035 | |
18036 | if (!rtl || !MEM_P (rtl)) |
18037 | { |
18038 | gcc_assert (!rtl); |
18039 | return 0; |
18040 | } |
18041 | gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF); |
18042 | |
18043 | /* TODO: We might get more coverage if we was actually delaying expansion |
18044 | of all expressions till end of compilation when constant pools are fully |
18045 | populated. */ |
18046 | if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0)))) |
18047 | { |
18048 | expansion_failed (expr: loc, NULL_RTX, |
18049 | reason: "CST value in contant pool but not marked." ); |
18050 | return 0; |
18051 | } |
18052 | return mem_loc_descriptor (XEXP (rtl, 0), mode: get_address_mode (mem: rtl), |
18053 | GET_MODE (rtl), initialized: VAR_INIT_STATUS_INITIALIZED); |
18054 | } |
18055 | |
/* Return dw_loc_list representing address of addr_expr LOC
   by looking for inner INDIRECT_REF expression and turning
   it into simple arithmetic.  Return NULL (0) when the reference cannot
   be expressed this way.

   See loc_list_from_tree for the meaning of CONTEXT.  */

static dw_loc_list_ref
loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
						   loc_descr_context *context)
{
  tree obj, offset;
  poly_int64 bitsize, bitpos, bytepos;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;
  dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;

  /* Decompose the operand of the ADDR_EXPR into base object, constant bit
     position and optional variable byte offset.  */
  obj = get_inner_reference (TREE_OPERAND (loc, 0),
			     &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);
  STRIP_NOPS (obj);
  /* Addresses are byte granular; give up on sub-byte positions.  */
  if (!multiple_p (a: bitpos, BITS_PER_UNIT, multiple: &bytepos))
    {
      expansion_failed (expr: loc, NULL_RTX, reason: "bitfield access" );
      return 0;
    }
  if (!INDIRECT_REF_P (obj))
    {
      expansion_failed (expr: obj,
			NULL_RTX, reason: "no indirect ref in inner refrence" );
      return 0;
    }
  if (!offset && known_eq (bitpos, 0))
    /* No displacement at all: the address is just the pointer operand of
       the INDIRECT_REF.  */
    list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
				   context);
  else if (toplev
	   && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
	   && (dwarf_version >= 4 || !dwarf_strict))
    {
      /* Compute pointer + offset(s) on the DWARF stack and deliver it as
	 an implicit value via DW_OP_stack_value (DWARFv4, or GNU
	 extension when !dwarf_strict).  */
      list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
      if (!list_ret)
	return 0;
      if (offset)
	{
	  /* Variable offset.  */
	  list_ret1 = loc_list_from_tree (offset, 0, context);
	  if (list_ret1 == 0)
	    return 0;
	  add_loc_list (ret: &list_ret, list: list_ret1);
	  if (!list_ret)
	    return 0;
	  add_loc_descr_to_each (list: list_ret,
				 ref: new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0));
	}
      /* Add the constant byte displacement, preferring the compact
	 DW_OP_plus_uconst form for positive compile-time constants.  */
      HOST_WIDE_INT value;
      if (bytepos.is_constant (const_value: &value) && value > 0)
	add_loc_descr_to_each (list: list_ret,
			       ref: new_loc_descr (op: DW_OP_plus_uconst, oprnd1: value, oprnd2: 0));
      else if (maybe_ne (a: bytepos, b: 0))
	loc_list_plus_const (list_head: list_ret, offset: bytepos);
      add_loc_descr_to_each (list: list_ret,
			     ref: new_loc_descr (op: DW_OP_stack_value, oprnd1: 0, oprnd2: 0));
    }
  return list_ret;
}
18120 | |
18121 | /* Set LOC to the next operation that is not a DW_OP_nop operation. In the case |
18122 | all operations from LOC are nops, move to the last one. Insert in NOPS all |
18123 | operations that are skipped. */ |
18124 | |
18125 | static void |
18126 | loc_descr_to_next_no_nop (dw_loc_descr_ref &loc, |
18127 | hash_set<dw_loc_descr_ref> &nops) |
18128 | { |
18129 | while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop) |
18130 | { |
18131 | nops.add (k: loc); |
18132 | loc = loc->dw_loc_next; |
18133 | } |
18134 | } |
18135 | |
/* Helper for loc_descr_without_nops: free the location description operation
   LOC.  DATA is unused; return true so the hash_set traversal continues.  */

bool
free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
{
  ggc_free (loc);
  return true;
}
18145 | |
/* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
   finishes LOC.  Skipped nops are unlinked from the chain and their nodes
   freed at the end.  */

static void
loc_descr_without_nops (dw_loc_descr_ref &loc)
{
  /* A lone trailing nop is deliberately kept.  */
  if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
    return;

  /* Set of all DW_OP_nop operations we remove.  */
  hash_set<dw_loc_descr_ref> nops;

  /* First, strip all prefix NOP operations in order to keep the head of the
     operations list.  */
  loc_descr_to_next_no_nop (loc, nops);

  for (dw_loc_descr_ref cur = loc; cur != NULL;)
    {
      /* For control flow operations: strip "prefix" nops in destination
	 labels.  */
      if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
	loc_descr_to_next_no_nop (loc&: cur->dw_loc_oprnd1.v.val_loc, nops);
      if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
	loc_descr_to_next_no_nop (loc&: cur->dw_loc_oprnd2.v.val_loc, nops);

      /* Do the same for the operations that follow, then move to the next
	 iteration.  */
      if (cur->dw_loc_next != NULL)
	loc_descr_to_next_no_nop (loc&: cur->dw_loc_next, nops);
      cur = cur->dw_loc_next;
    }

  /* Now that nothing references them any more, free the skipped nops.  */
  nops.traverse<void *, free_loc_descr> (NULL);
}
18180 | |
18181 | |
struct dwarf_procedure_info;

/* Helper structure for location descriptions generation.  */
struct loc_descr_context
{
  /* The type that is implicitly referenced by DW_OP_push_object_address, or
     NULL_TREE if DW_OP_push_object_address is invalid for this location
     description.  This is used when processing PLACEHOLDER_EXPR nodes.  */
  tree context_type;
  /* The ..._DECL node that should be translated as a
     DW_OP_push_object_address operation.  */
  tree base_decl;
  /* Information about the DWARF procedure we are currently generating.  NULL if
     we are not generating a DWARF procedure.  */
  struct dwarf_procedure_info *dpi;
  /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
     by consumer.  Used for DW_TAG_generic_subrange attributes.  */
  bool placeholder_arg;
  /* True if PLACEHOLDER_EXPR has been seen.  */
  bool placeholder_seen;
  /* True if strict preservation of signedness has been requested.  */
  bool strict_signedness;
};
18205 | |
18206 | /* DWARF procedures generation |
18207 | |
18208 | DWARF expressions (aka. location descriptions) are used to encode variable |
18209 | things such as sizes or offsets. Such computations can have redundant parts |
18210 | that can be factorized in order to reduce the size of the output debug |
18211 | information. This is the whole point of DWARF procedures. |
18212 | |
18213 | Thanks to stor-layout.cc, size and offset expressions in GENERIC trees are |
18214 | already factorized into functions ("size functions") in order to handle very |
18215 | big and complex types. Such functions are quite simple: they have integral |
18216 | arguments, they return an integral result and their body contains only a |
18217 | return statement with arithmetic expressions. This is the only kind of |
18218 | function we are interested in translating into DWARF procedures, here. |
18219 | |
18220 | DWARF expressions and DWARF procedure are executed using a stack, so we have |
18221 | to define some calling convention for them to interact. Let's say that: |
18222 | |
18223 | - Before calling a DWARF procedure, DWARF expressions must push on the stack |
18224 | all arguments in reverse order (right-to-left) so that when the DWARF |
18225 | procedure execution starts, the first argument is the top of the stack. |
18226 | |
18227 | - Then, when returning, the DWARF procedure must have consumed all arguments |
18228 | on the stack, must have pushed the result and touched nothing else. |
18229 | |
   - Each integral argument and the result are of integral types that can be
     held in a single stack slot.
18232 | |
18233 | - We call "frame offset" the number of stack slots that are "under DWARF |
18234 | procedure control": it includes the arguments slots, the temporaries and |
18235 | the result slot. Thus, it is equal to the number of arguments when the |
18236 | procedure execution starts and must be equal to one (the result) when it |
18237 | returns. */ |
18238 | |
/* Helper structure used when generating operations for a DWARF procedure.
   See the "DWARF procedures generation" commentary above for the calling
   convention these fields support.  */
struct dwarf_procedure_info
{
  /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
     currently translated.  */
  tree fndecl;
  /* The number of arguments FNDECL takes.  This is also the frame offset
     when the procedure's execution starts (one slot per argument).  */
  unsigned args_count;
};
18248 | |
18249 | /* Return a pointer to a newly created DIE node for a DWARF procedure. Add |
18250 | LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE, |
18251 | equate it to this DIE. */ |
18252 | |
18253 | static dw_die_ref |
18254 | new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl, |
18255 | dw_die_ref parent_die) |
18256 | { |
18257 | dw_die_ref dwarf_proc_die; |
18258 | |
18259 | if ((dwarf_version < 3 && dwarf_strict) |
18260 | || location == NULL) |
18261 | return NULL; |
18262 | |
18263 | dwarf_proc_die = new_die (tag_value: DW_TAG_dwarf_procedure, parent_die, t: fndecl); |
18264 | if (fndecl) |
18265 | equate_decl_number_to_die (decl: fndecl, decl_die: dwarf_proc_die); |
18266 | add_AT_loc (die: dwarf_proc_die, attr_kind: DW_AT_location, loc: location); |
18267 | return dwarf_proc_die; |
18268 | } |
18269 | |
18270 | /* Return whether TYPE is a supported type as a DWARF procedure argument |
18271 | type or return type (we handle only scalar types and pointer types that |
18272 | aren't wider than the DWARF expression evaluation stack). */ |
18273 | |
18274 | static bool |
18275 | is_handled_procedure_type (tree type) |
18276 | { |
18277 | return ((INTEGRAL_TYPE_P (type) |
18278 | || TREE_CODE (type) == OFFSET_TYPE |
18279 | || TREE_CODE (type) == POINTER_TYPE) |
18280 | && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE); |
18281 | } |
18282 | |
/* Helper for resolve_args_picking: do the same but stop when coming across
   visited nodes.  For each node we visit, register in FRAME_OFFSETS the frame
   offset *before* evaluating the corresponding operation.  Return false when
   a picking offset cannot be encoded or a callee's stack usage is unknown.  */

static bool
resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
			struct dwarf_procedure_info *dpi,
			hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
{
  /* The "frame_offset" identifier is already used to name a macro... */
  unsigned frame_offset_ = initial_frame_offset;
  dw_loc_descr_ref l;

  for (l = loc; l != NULL;)
    {
      bool existed;
      unsigned &l_frame_offset = frame_offsets.get_or_insert (k: l, existed: &existed);

      /* If we already met this node, there is nothing to compute anymore.  */
      if (existed)
	{
	  /* Make sure that the stack size is consistent wherever the execution
	     flow comes from.  */
	  gcc_assert ((unsigned) l_frame_offset == frame_offset_);
	  break;
	}
      l_frame_offset = frame_offset_;

      /* If needed, relocate the picking offset with respect to the frame
	 offset.  */
      if (l->frame_offset_rel)
	{
	  unsigned HOST_WIDE_INT off;
	  switch (l->dw_loc_opc)
	    {
	    case DW_OP_pick:
	      off = l->dw_loc_oprnd1.v.val_unsigned;
	      break;
	    case DW_OP_dup:
	      off = 0;
	      break;
	    case DW_OP_over:
	      off = 1;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  /* frame_offset_ is the size of the current stack frame, including
	     incoming arguments.  Besides, the arguments are pushed
	     right-to-left.  Thus, in order to access the Nth argument from
	     this operation node, the picking has to skip temporaries *plus*
	     one stack slot per argument (0 for the first one, 1 for the second
	     one, etc.).

	     The targeted argument number (N) is already set as the operand,
	     and the number of temporaries can be computed with:
	       frame_offsets_ - dpi->args_count */
	  off += frame_offset_ - dpi->args_count;

	  /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)...  */
	  if (off > 255)
	    return false;

	  /* Re-encode with the most compact operation for the offset:
	     DW_OP_dup for 0, DW_OP_over for 1, DW_OP_pick otherwise.  */
	  if (off == 0)
	    {
	      l->dw_loc_opc = DW_OP_dup;
	      l->dw_loc_oprnd1.v.val_unsigned = 0;
	    }
	  else if (off == 1)
	    {
	      l->dw_loc_opc = DW_OP_over;
	      l->dw_loc_oprnd1.v.val_unsigned = 0;
	    }
	  else
	    {
	      l->dw_loc_opc = DW_OP_pick;
	      l->dw_loc_oprnd1.v.val_unsigned = off;
	    }
	}

      /* Update frame_offset according to the effect the current operation has
	 on the stack.  */
      switch (l->dw_loc_opc)
	{
	/* No net effect on the stack depth.  */
	case DW_OP_deref:
	case DW_OP_swap:
	case DW_OP_rot:
	case DW_OP_abs:
	case DW_OP_neg:
	case DW_OP_not:
	case DW_OP_plus_uconst:
	case DW_OP_skip:
	case DW_OP_reg0:
	case DW_OP_reg1:
	case DW_OP_reg2:
	case DW_OP_reg3:
	case DW_OP_reg4:
	case DW_OP_reg5:
	case DW_OP_reg6:
	case DW_OP_reg7:
	case DW_OP_reg8:
	case DW_OP_reg9:
	case DW_OP_reg10:
	case DW_OP_reg11:
	case DW_OP_reg12:
	case DW_OP_reg13:
	case DW_OP_reg14:
	case DW_OP_reg15:
	case DW_OP_reg16:
	case DW_OP_reg17:
	case DW_OP_reg18:
	case DW_OP_reg19:
	case DW_OP_reg20:
	case DW_OP_reg21:
	case DW_OP_reg22:
	case DW_OP_reg23:
	case DW_OP_reg24:
	case DW_OP_reg25:
	case DW_OP_reg26:
	case DW_OP_reg27:
	case DW_OP_reg28:
	case DW_OP_reg29:
	case DW_OP_reg30:
	case DW_OP_reg31:
	case DW_OP_bregx:
	case DW_OP_piece:
	case DW_OP_deref_size:
	case DW_OP_nop:
	case DW_OP_bit_piece:
	case DW_OP_implicit_value:
	case DW_OP_stack_value:
	case DW_OP_deref_type:
	case DW_OP_convert:
	case DW_OP_reinterpret:
	case DW_OP_GNU_deref_type:
	case DW_OP_GNU_convert:
	case DW_OP_GNU_reinterpret:
	  break;

	/* These push one value on the stack.  */
	case DW_OP_addr:
	case DW_OP_const1u:
	case DW_OP_const1s:
	case DW_OP_const2u:
	case DW_OP_const2s:
	case DW_OP_const4u:
	case DW_OP_const4s:
	case DW_OP_const8u:
	case DW_OP_const8s:
	case DW_OP_constu:
	case DW_OP_consts:
	case DW_OP_dup:
	case DW_OP_over:
	case DW_OP_pick:
	case DW_OP_lit0:
	case DW_OP_lit1:
	case DW_OP_lit2:
	case DW_OP_lit3:
	case DW_OP_lit4:
	case DW_OP_lit5:
	case DW_OP_lit6:
	case DW_OP_lit7:
	case DW_OP_lit8:
	case DW_OP_lit9:
	case DW_OP_lit10:
	case DW_OP_lit11:
	case DW_OP_lit12:
	case DW_OP_lit13:
	case DW_OP_lit14:
	case DW_OP_lit15:
	case DW_OP_lit16:
	case DW_OP_lit17:
	case DW_OP_lit18:
	case DW_OP_lit19:
	case DW_OP_lit20:
	case DW_OP_lit21:
	case DW_OP_lit22:
	case DW_OP_lit23:
	case DW_OP_lit24:
	case DW_OP_lit25:
	case DW_OP_lit26:
	case DW_OP_lit27:
	case DW_OP_lit28:
	case DW_OP_lit29:
	case DW_OP_lit30:
	case DW_OP_lit31:
	case DW_OP_breg0:
	case DW_OP_breg1:
	case DW_OP_breg2:
	case DW_OP_breg3:
	case DW_OP_breg4:
	case DW_OP_breg5:
	case DW_OP_breg6:
	case DW_OP_breg7:
	case DW_OP_breg8:
	case DW_OP_breg9:
	case DW_OP_breg10:
	case DW_OP_breg11:
	case DW_OP_breg12:
	case DW_OP_breg13:
	case DW_OP_breg14:
	case DW_OP_breg15:
	case DW_OP_breg16:
	case DW_OP_breg17:
	case DW_OP_breg18:
	case DW_OP_breg19:
	case DW_OP_breg20:
	case DW_OP_breg21:
	case DW_OP_breg22:
	case DW_OP_breg23:
	case DW_OP_breg24:
	case DW_OP_breg25:
	case DW_OP_breg26:
	case DW_OP_breg27:
	case DW_OP_breg28:
	case DW_OP_breg29:
	case DW_OP_breg30:
	case DW_OP_breg31:
	case DW_OP_fbreg:
	case DW_OP_push_object_address:
	case DW_OP_call_frame_cfa:
	case DW_OP_GNU_variable_value:
	case DW_OP_GNU_addr_index:
	case DW_OP_GNU_const_index:
	  ++frame_offset_;
	  break;

	/* These consume one more value than they push.  */
	case DW_OP_drop:
	case DW_OP_xderef:
	case DW_OP_and:
	case DW_OP_div:
	case DW_OP_minus:
	case DW_OP_mod:
	case DW_OP_mul:
	case DW_OP_or:
	case DW_OP_plus:
	case DW_OP_shl:
	case DW_OP_shr:
	case DW_OP_shra:
	case DW_OP_xor:
	case DW_OP_bra:
	case DW_OP_eq:
	case DW_OP_ge:
	case DW_OP_gt:
	case DW_OP_le:
	case DW_OP_lt:
	case DW_OP_ne:
	case DW_OP_regx:
	case DW_OP_xderef_size:
	  --frame_offset_;
	  break;

	/* Calls change the depth by the callee's recorded net stack usage;
	   fail if that is not known.  */
	case DW_OP_call2:
	case DW_OP_call4:
	case DW_OP_call_ref:
	  {
	    dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
	    int *stack_usage = dwarf_proc_stack_usage_map->get (k: dwarf_proc);

	    if (stack_usage == NULL)
	      return false;
	    frame_offset_ += *stack_usage;
	    break;
	  }

	case DW_OP_implicit_pointer:
	case DW_OP_entry_value:
	case DW_OP_const_type:
	case DW_OP_regval_type:
	case DW_OP_form_tls_address:
	case DW_OP_GNU_push_tls_address:
	case DW_OP_GNU_uninit:
	case DW_OP_GNU_encoded_addr:
	case DW_OP_GNU_implicit_pointer:
	case DW_OP_GNU_entry_value:
	case DW_OP_GNU_const_type:
	case DW_OP_GNU_regval_type:
	case DW_OP_GNU_parameter_ref:
	  /* loc_list_from_tree will probably not output these operations for
	     size functions, so assume they will not appear here.  */
	  /* Fall through...  */

	default:
	  gcc_unreachable ();
	}

      /* Now, follow the control flow (except subroutine calls).  */
      switch (l->dw_loc_opc)
	{
	/* Conditional branch: recurse down the fallthrough path, then
	   continue along the branch target.  */
	case DW_OP_bra:
	  if (!resolve_args_picking_1 (loc: l->dw_loc_next, initial_frame_offset: frame_offset_, dpi,
				       frame_offsets))
	    return false;
	  /* Fall through. */

	case DW_OP_skip:
	  l = l->dw_loc_oprnd1.v.val_loc;
	  break;

	/* DW_OP_stack_value ends the expression: stop walking.  */
	case DW_OP_stack_value:
	  return true;

	default:
	  l = l->dw_loc_next;
	  break;
	}
    }

  return true;
}
18592 | |
18593 | /* Make a DFS over operations reachable through LOC (i.e. follow branch |
18594 | operations) in order to resolve the operand of DW_OP_pick operations that |
18595 | target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame |
18596 | offset *before* LOC is executed. Return if all relocations were |
18597 | successful. */ |
18598 | |
18599 | static bool |
18600 | resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset, |
18601 | struct dwarf_procedure_info *dpi) |
18602 | { |
18603 | /* Associate to all visited operations the frame offset *before* evaluating |
18604 | this operation. */ |
18605 | hash_map<dw_loc_descr_ref, unsigned> frame_offsets; |
18606 | |
18607 | return |
18608 | resolve_args_picking_1 (loc, initial_frame_offset, dpi, frame_offsets); |
18609 | } |
18610 | |
/* Try to generate a DWARF procedure that computes the same result as FNDECL.
   Return NULL if it is not possible (unsupported signature, no saved body,
   unsupported body shape, or untranslatable expression).  */

static dw_die_ref
function_to_dwarf_procedure (tree fndecl)
{
  struct dwarf_procedure_info dpi;
  struct loc_descr_context ctx = {
    NULL_TREE,	/* context_type */
    NULL_TREE,	/* base_decl */
    .dpi: &dpi,	/* dpi */
    .placeholder_arg: false,	/* placeholder_arg */
    .placeholder_seen: false,	/* placeholder_seen */
    .strict_signedness: true	/* strict_signedness */
  };
  dw_die_ref dwarf_proc_die;
  tree tree_body = DECL_SAVED_TREE (fndecl);
  dw_loc_descr_ref loc_body, epilogue;

  tree cursor;
  unsigned i;

  /* Do not generate multiple DWARF procedures for the same function
     declaration.  */
  dwarf_proc_die = lookup_decl_die (decl: fndecl);
  if (dwarf_proc_die != NULL)
    return dwarf_proc_die;

  /* DWARF procedures are available starting with the DWARFv3 standard.  */
  if (dwarf_version < 3 && dwarf_strict)
    return NULL;

  /* We handle only functions for which we still have a body, that return a
     supported type and that takes arguments with supported types.  Note that
     there is no point translating functions that return nothing.  */
  if (tree_body == NULL_TREE
      || DECL_RESULT (fndecl) == NULL_TREE
      || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
    return NULL;

  for (cursor = DECL_ARGUMENTS (fndecl);
       cursor != NULL_TREE;
       cursor = TREE_CHAIN (cursor))
    if (!is_handled_procedure_type (TREE_TYPE (cursor)))
      return NULL;

  /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)).  */
  if (TREE_CODE (tree_body) != RETURN_EXPR)
    return NULL;
  tree_body = TREE_OPERAND (tree_body, 0);
  if (TREE_CODE (tree_body) != MODIFY_EXPR
      || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
    return NULL;
  tree_body = TREE_OPERAND (tree_body, 1);

  /* Try to translate the body expression itself.  Note that this will probably
     cause an infinite recursion if its call graph has a cycle.  This is very
     unlikely for size functions, however, so don't bother with such things at
     the moment.  */
  dpi.fndecl = fndecl;
  dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
  loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
  if (!loc_body)
    return NULL;

  /* After evaluating all operands in "loc_body", we should still have on the
     stack all arguments plus the desired function result (top of the stack).
     Generate code in order to keep only the result in our stack frame.
     Each DW_OP_swap/DW_OP_drop pair discards one leftover argument slot
     sitting just below the result.  */
  epilogue = NULL;
  for (i = 0; i < dpi.args_count; ++i)
    {
      dw_loc_descr_ref op_couple = new_loc_descr (op: DW_OP_swap, oprnd1: 0, oprnd2: 0);
      op_couple->dw_loc_next = new_loc_descr (op: DW_OP_drop, oprnd1: 0, oprnd2: 0);
      op_couple->dw_loc_next->dw_loc_next = epilogue;
      epilogue = op_couple;
    }
  add_loc_descr (list_head: &loc_body, descr: epilogue);
  if (!resolve_args_picking (loc: loc_body, initial_frame_offset: dpi.args_count, dpi: &dpi))
    return NULL;

  /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
     because they are considered useful.  Now there is an epilogue, they are
     not anymore, so give it another try.   */
  loc_descr_without_nops (loc&: loc_body);

  /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
     a DW_TAG_dwarf_procedure, so we may have a conflict, here.  It's unlikely,
     though, given that size functions do not come from source, so they should
     not have a dedicated DW_TAG_subprogram DIE.  */
  dwarf_proc_die
    = new_dwarf_proc_die (location: loc_body, fndecl,
			  parent_die: get_context_die (DECL_CONTEXT (fndecl)));

  /* The called DWARF procedure consumes one stack slot per argument and
     returns one stack slot.  */
  dwarf_proc_stack_usage_map->put (k: dwarf_proc_die, v: 1 - dpi.args_count);

  return dwarf_proc_die;
}
18710 | |
18711 | /* Helper function for loc_list_from_tree. Perform OP binary op, |
18712 | but after converting arguments to type_die, afterwards convert |
18713 | back to unsigned. */ |
18714 | |
18715 | static dw_loc_list_ref |
18716 | typed_binop_from_tree (enum dwarf_location_atom op, tree loc, |
18717 | dw_die_ref type_die, scalar_int_mode mode, |
18718 | struct loc_descr_context *context) |
18719 | { |
18720 | dw_loc_list_ref op0, op1; |
18721 | dw_loc_descr_ref cvt, binop; |
18722 | |
18723 | if (type_die == NULL) |
18724 | return NULL; |
18725 | |
18726 | op0 = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context); |
18727 | op1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context); |
18728 | if (op0 == NULL || op1 == NULL) |
18729 | return NULL; |
18730 | |
18731 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
18732 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
18733 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
18734 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
18735 | add_loc_descr_to_each (list: op0, ref: cvt); |
18736 | |
18737 | cvt = new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0); |
18738 | cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
18739 | cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die; |
18740 | cvt->dw_loc_oprnd1.v.val_die_ref.external = 0; |
18741 | add_loc_descr_to_each (list: op1, ref: cvt); |
18742 | |
18743 | add_loc_list (ret: &op0, list: op1); |
18744 | if (op0 == NULL) |
18745 | return NULL; |
18746 | |
18747 | binop = new_loc_descr (op, oprnd1: 0, oprnd2: 0); |
18748 | convert_descriptor_to_mode (mode, op: binop); |
18749 | add_loc_descr_to_each (list: op0, ref: binop); |
18750 | |
18751 | return op0; |
18752 | } |
18753 | |
/* Generate Dwarf location list representing LOC.
   If WANT_ADDRESS is 0, an expression computing LOC will be returned.
   If WANT_ADDRESS is 1, an expression computing the address of LOC will be
					 returned.
   If WANT_ADDRESS is 2, an expression computing an address usable in a
					 location description will be returned
					 (i.e. DW_OP_reg can be used to refer
					 to register values).
18760 | |
18761 | CONTEXT provides information to customize the location descriptions |
18762 | generation. Its context_type field specifies what type is implicitly |
18763 | referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation |
18764 | will not be generated. |
18765 | |
18766 | Its DPI field determines whether we are generating a DWARF expression for a |
18767 | DWARF procedure, so PARM_DECL references are processed specifically. |
18768 | |
18769 | If CONTEXT is NULL, the behavior is the same as if context_type, base_decl |
18770 | and dpi fields were null. */ |
18771 | |
18772 | static dw_loc_list_ref |
18773 | loc_list_from_tree_1 (tree loc, int want_address, |
18774 | struct loc_descr_context *context) |
18775 | { |
18776 | dw_loc_descr_ref ret = NULL, ret1 = NULL; |
18777 | dw_loc_list_ref list_ret = NULL, list_ret1 = NULL; |
18778 | int have_address = 0; |
18779 | enum dwarf_location_atom op; |
18780 | |
18781 | /* ??? Most of the time we do not take proper care for sign/zero |
18782 | extending the values properly. Hopefully this won't be a real |
18783 | problem... */ |
18784 | |
18785 | if (context != NULL |
18786 | && context->base_decl == loc |
18787 | && want_address == 0) |
18788 | { |
18789 | if (dwarf_version >= 3 || !dwarf_strict) |
18790 | return new_loc_list (expr: new_loc_descr (op: DW_OP_push_object_address, oprnd1: 0, oprnd2: 0), |
18791 | NULL, vbegin: 0, NULL, vend: 0, NULL); |
18792 | else |
18793 | return NULL; |
18794 | } |
18795 | |
18796 | switch (TREE_CODE (loc)) |
18797 | { |
18798 | case ERROR_MARK: |
18799 | expansion_failed (expr: loc, NULL_RTX, reason: "ERROR_MARK" ); |
18800 | return 0; |
18801 | |
18802 | case PLACEHOLDER_EXPR: |
18803 | /* This case involves extracting fields from an object to determine the |
18804 | position of other fields. It is supposed to appear only as the first |
18805 | operand of COMPONENT_REF nodes and to reference precisely the type |
18806 | that the context allows or its enclosing type. */ |
18807 | if (context != NULL |
18808 | && (TREE_TYPE (loc) == context->context_type |
18809 | || TREE_TYPE (loc) == TYPE_CONTEXT (context->context_type)) |
18810 | && want_address >= 1) |
18811 | { |
18812 | if (dwarf_version >= 3 || !dwarf_strict) |
18813 | { |
18814 | ret = new_loc_descr (op: DW_OP_push_object_address, oprnd1: 0, oprnd2: 0); |
18815 | have_address = 1; |
18816 | break; |
18817 | } |
18818 | else |
18819 | return NULL; |
18820 | } |
18821 | /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for |
18822 | the single argument passed by consumer. */ |
18823 | else if (context != NULL |
18824 | && context->placeholder_arg |
18825 | && INTEGRAL_TYPE_P (TREE_TYPE (loc)) |
18826 | && want_address == 0) |
18827 | { |
18828 | ret = new_loc_descr (op: DW_OP_pick, oprnd1: 0, oprnd2: 0); |
18829 | ret->frame_offset_rel = 1; |
18830 | context->placeholder_seen = true; |
18831 | break; |
18832 | } |
18833 | else |
18834 | expansion_failed (expr: loc, NULL_RTX, |
18835 | reason: "PLACEHOLDER_EXPR for an unexpected type" ); |
18836 | break; |
18837 | |
18838 | case CALL_EXPR: |
18839 | { |
18840 | tree callee = get_callee_fndecl (loc); |
18841 | dw_die_ref dwarf_proc; |
18842 | |
18843 | if (callee |
18844 | && is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))) |
18845 | && (dwarf_proc = function_to_dwarf_procedure (fndecl: callee))) |
18846 | { |
18847 | /* DWARF procedures are used for size functions, which are built |
18848 | when size expressions contain conditional constructs, so we |
18849 | request strict preservation of signedness for comparisons. */ |
18850 | bool old_strict_signedness; |
18851 | if (context) |
18852 | { |
18853 | old_strict_signedness = context->strict_signedness; |
18854 | context->strict_signedness = true; |
18855 | } |
18856 | |
18857 | /* Evaluate arguments right-to-left so that the first argument |
18858 | will be the top-most one on the stack. */ |
18859 | for (int i = call_expr_nargs (loc) - 1; i >= 0; --i) |
18860 | { |
18861 | tree arg = CALL_EXPR_ARG (loc, i); |
18862 | ret1 = loc_descriptor_from_tree (arg, 0, context); |
18863 | if (!ret1) |
18864 | { |
18865 | expansion_failed (expr: arg, NULL_RTX, reason: "CALL_EXPR argument" ); |
18866 | return NULL; |
18867 | } |
18868 | add_loc_descr (list_head: &ret, descr: ret1); |
18869 | } |
18870 | |
18871 | ret1 = new_loc_descr (op: DW_OP_call4, oprnd1: 0, oprnd2: 0); |
18872 | ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
18873 | ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc; |
18874 | ret1->dw_loc_oprnd1.v.val_die_ref.external = 0; |
18875 | add_loc_descr (list_head: &ret, descr: ret1); |
18876 | if (context) |
18877 | context->strict_signedness = old_strict_signedness; |
18878 | } |
18879 | else |
18880 | expansion_failed (expr: loc, NULL_RTX, reason: "CALL_EXPR target" ); |
18881 | break; |
18882 | } |
18883 | |
18884 | case PREINCREMENT_EXPR: |
18885 | case PREDECREMENT_EXPR: |
18886 | case POSTINCREMENT_EXPR: |
18887 | case POSTDECREMENT_EXPR: |
18888 | expansion_failed (expr: loc, NULL_RTX, reason: "PRE/POST INDCREMENT/DECREMENT" ); |
18889 | /* There are no opcodes for these operations. */ |
18890 | return 0; |
18891 | |
18892 | case ADDR_EXPR: |
18893 | /* If we already want an address, see if there is INDIRECT_REF inside |
18894 | e.g. for &this->field. */ |
18895 | if (want_address) |
18896 | { |
18897 | list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref |
18898 | (loc, toplev: want_address == 2, context); |
18899 | if (list_ret) |
18900 | have_address = 1; |
18901 | else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0)) |
18902 | && (ret = cst_pool_loc_descr (loc))) |
18903 | have_address = 1; |
18904 | } |
18905 | /* Otherwise, process the argument and look for the address. */ |
18906 | if (!list_ret && !ret) |
18907 | list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address: 1, context); |
18908 | else |
18909 | { |
18910 | if (want_address) |
18911 | expansion_failed (expr: loc, NULL_RTX, reason: "need address of ADDR_EXPR" ); |
18912 | return NULL; |
18913 | } |
18914 | break; |
18915 | |
18916 | case VAR_DECL: |
18917 | if (DECL_THREAD_LOCAL_P (loc)) |
18918 | { |
18919 | rtx rtl; |
18920 | enum dwarf_location_atom tls_op; |
18921 | enum dtprel_bool dtprel = dtprel_false; |
18922 | |
18923 | if (targetm.have_tls) |
18924 | { |
18925 | /* If this is not defined, we have no way to emit the |
18926 | data. */ |
18927 | if (!targetm.asm_out.output_dwarf_dtprel) |
18928 | return 0; |
18929 | |
18930 | /* The way DW_OP_GNU_push_tls_address is specified, we |
18931 | can only look up addresses of objects in the current |
18932 | module. We used DW_OP_addr as first op, but that's |
18933 | wrong, because DW_OP_addr is relocated by the debug |
18934 | info consumer, while DW_OP_GNU_push_tls_address |
18935 | operand shouldn't be. */ |
18936 | if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc)) |
18937 | return 0; |
18938 | dtprel = dtprel_true; |
18939 | /* We check for DWARF 5 here because gdb did not implement |
18940 | DW_OP_form_tls_address until after 7.12. */ |
18941 | tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address |
18942 | : DW_OP_GNU_push_tls_address); |
18943 | } |
18944 | else |
18945 | { |
18946 | if (!targetm.emutls.debug_form_tls_address |
18947 | || !(dwarf_version >= 3 || !dwarf_strict)) |
18948 | return 0; |
18949 | /* We stuffed the control variable into the DECL_VALUE_EXPR |
18950 | to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should |
18951 | no longer appear in gimple code. We used the control |
18952 | variable in specific so that we could pick it up here. */ |
18953 | loc = DECL_VALUE_EXPR (loc); |
18954 | tls_op = DW_OP_form_tls_address; |
18955 | } |
18956 | |
18957 | rtl = rtl_for_decl_location (loc); |
18958 | if (rtl == NULL_RTX) |
18959 | return 0; |
18960 | |
18961 | if (!MEM_P (rtl)) |
18962 | return 0; |
18963 | rtl = XEXP (rtl, 0); |
18964 | if (! CONSTANT_P (rtl)) |
18965 | return 0; |
18966 | |
18967 | ret = new_addr_loc_descr (addr: rtl, dtprel); |
18968 | ret1 = new_loc_descr (op: tls_op, oprnd1: 0, oprnd2: 0); |
18969 | add_loc_descr (list_head: &ret, descr: ret1); |
18970 | |
18971 | have_address = 1; |
18972 | break; |
18973 | } |
18974 | /* FALLTHRU */ |
18975 | |
18976 | case PARM_DECL: |
18977 | if (context != NULL && context->dpi != NULL |
18978 | && DECL_CONTEXT (loc) == context->dpi->fndecl) |
18979 | { |
18980 | /* We are generating code for a DWARF procedure and we want to access |
18981 | one of its arguments: find the appropriate argument offset and let |
18982 | the resolve_args_picking pass compute the offset that complies |
18983 | with the stack frame size. */ |
18984 | unsigned i = 0; |
18985 | tree cursor; |
18986 | |
18987 | for (cursor = DECL_ARGUMENTS (context->dpi->fndecl); |
18988 | cursor != NULL_TREE && cursor != loc; |
18989 | cursor = TREE_CHAIN (cursor), ++i) |
18990 | ; |
18991 | /* If we are translating a DWARF procedure, all referenced parameters |
18992 | must belong to the current function. */ |
18993 | gcc_assert (cursor != NULL_TREE); |
18994 | |
18995 | ret = new_loc_descr (op: DW_OP_pick, oprnd1: i, oprnd2: 0); |
18996 | ret->frame_offset_rel = 1; |
18997 | break; |
18998 | } |
18999 | /* FALLTHRU */ |
19000 | |
19001 | case RESULT_DECL: |
19002 | if (DECL_HAS_VALUE_EXPR_P (loc)) |
19003 | { |
19004 | tree value_expr = DECL_VALUE_EXPR (loc); |
19005 | |
19006 | /* Non-local frame structures are DECL_IGNORED_P variables so we need |
19007 | to wait until they get an RTX in order to reference them. */ |
19008 | if (early_dwarf |
19009 | && TREE_CODE (value_expr) == COMPONENT_REF |
19010 | && VAR_P (TREE_OPERAND (value_expr, 0)) |
19011 | && DECL_NONLOCAL_FRAME (TREE_OPERAND (value_expr, 0))) |
19012 | ; |
19013 | else |
19014 | return loc_list_from_tree_1 (loc: value_expr, want_address, context); |
19015 | } |
19016 | |
19017 | /* FALLTHRU */ |
19018 | |
19019 | case FUNCTION_DECL: |
19020 | { |
19021 | rtx rtl; |
19022 | var_loc_list *loc_list = lookup_decl_loc (decl: loc); |
19023 | |
19024 | if (loc_list && loc_list->first) |
19025 | { |
19026 | list_ret = dw_loc_list (loc_list, decl: loc, want_address); |
19027 | have_address = want_address != 0; |
19028 | break; |
19029 | } |
19030 | rtl = rtl_for_decl_location (loc); |
19031 | if (rtl == NULL_RTX) |
19032 | { |
19033 | if (TREE_CODE (loc) != FUNCTION_DECL |
19034 | && early_dwarf |
19035 | && want_address != 1 |
19036 | && ! DECL_IGNORED_P (loc) |
19037 | && (INTEGRAL_TYPE_P (TREE_TYPE (loc)) |
19038 | || POINTER_TYPE_P (TREE_TYPE (loc))) |
19039 | && TYPE_MODE (TREE_TYPE (loc)) != BLKmode |
19040 | && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc))) |
19041 | <= DWARF2_ADDR_SIZE)) |
19042 | { |
19043 | dw_die_ref ref = lookup_decl_die (decl: loc); |
19044 | if (ref) |
19045 | { |
19046 | ret = new_loc_descr (op: DW_OP_GNU_variable_value, oprnd1: 0, oprnd2: 0); |
19047 | ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref; |
19048 | ret->dw_loc_oprnd1.v.val_die_ref.die = ref; |
19049 | ret->dw_loc_oprnd1.v.val_die_ref.external = 0; |
19050 | } |
19051 | else if (current_function_decl |
19052 | && DECL_CONTEXT (loc) == current_function_decl) |
19053 | { |
19054 | ret = new_loc_descr (op: DW_OP_GNU_variable_value, oprnd1: 0, oprnd2: 0); |
19055 | ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref; |
19056 | ret->dw_loc_oprnd1.v.val_decl_ref = loc; |
19057 | } |
19058 | break; |
19059 | } |
19060 | expansion_failed (expr: loc, NULL_RTX, reason: "DECL has no RTL" ); |
19061 | return 0; |
19062 | } |
19063 | else if (CONST_INT_P (rtl)) |
19064 | { |
19065 | HOST_WIDE_INT val = INTVAL (rtl); |
19066 | if (TYPE_UNSIGNED (TREE_TYPE (loc))) |
19067 | val &= GET_MODE_MASK (DECL_MODE (loc)); |
19068 | ret = int_loc_descriptor (poly_i: val); |
19069 | } |
19070 | else if (GET_CODE (rtl) == CONST_STRING) |
19071 | { |
19072 | expansion_failed (expr: loc, NULL_RTX, reason: "CONST_STRING" ); |
19073 | return 0; |
19074 | } |
19075 | else if (CONSTANT_P (rtl) && const_ok_for_output (rtl)) |
19076 | ret = new_addr_loc_descr (addr: rtl, dtprel: dtprel_false); |
19077 | else |
19078 | { |
19079 | machine_mode mode, mem_mode; |
19080 | |
19081 | /* Certain constructs can only be represented at top-level. */ |
19082 | if (want_address == 2) |
19083 | { |
19084 | ret = loc_descriptor (rtl, VOIDmode, |
19085 | initialized: VAR_INIT_STATUS_INITIALIZED); |
19086 | have_address = 1; |
19087 | } |
19088 | else |
19089 | { |
19090 | mode = GET_MODE (rtl); |
19091 | mem_mode = VOIDmode; |
19092 | if (MEM_P (rtl)) |
19093 | { |
19094 | mem_mode = mode; |
19095 | mode = get_address_mode (mem: rtl); |
19096 | rtl = XEXP (rtl, 0); |
19097 | have_address = 1; |
19098 | } |
19099 | ret = mem_loc_descriptor (rtl, mode, mem_mode, |
19100 | initialized: VAR_INIT_STATUS_INITIALIZED); |
19101 | } |
19102 | if (!ret) |
19103 | expansion_failed (expr: loc, rtl, |
19104 | reason: "failed to produce loc descriptor for rtl" ); |
19105 | } |
19106 | } |
19107 | break; |
19108 | |
19109 | case MEM_REF: |
19110 | if (!integer_zerop (TREE_OPERAND (loc, 1))) |
19111 | { |
19112 | have_address = 1; |
19113 | goto do_plus; |
19114 | } |
19115 | /* Fallthru. */ |
19116 | case INDIRECT_REF: |
19117 | list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address: 0, context); |
19118 | have_address = 1; |
19119 | break; |
19120 | |
19121 | case TARGET_MEM_REF: |
19122 | case SSA_NAME: |
19123 | case DEBUG_EXPR_DECL: |
19124 | return NULL; |
19125 | |
19126 | case COMPOUND_EXPR: |
19127 | return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address, |
19128 | context); |
19129 | |
19130 | CASE_CONVERT: |
19131 | case VIEW_CONVERT_EXPR: |
19132 | case SAVE_EXPR: |
19133 | case MODIFY_EXPR: |
19134 | case NON_LVALUE_EXPR: |
19135 | return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address, |
19136 | context); |
19137 | |
19138 | case COMPONENT_REF: |
19139 | case BIT_FIELD_REF: |
19140 | case ARRAY_REF: |
19141 | case ARRAY_RANGE_REF: |
19142 | case REALPART_EXPR: |
19143 | case IMAGPART_EXPR: |
19144 | { |
19145 | tree obj, offset; |
19146 | poly_int64 bitsize, bitpos, bytepos; |
19147 | machine_mode mode; |
19148 | int unsignedp, reversep, volatilep = 0; |
19149 | |
19150 | obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode, |
19151 | &unsignedp, &reversep, &volatilep); |
19152 | |
19153 | gcc_assert (obj != loc); |
19154 | |
19155 | list_ret = loc_list_from_tree_1 (loc: obj, |
19156 | want_address: want_address == 2 |
19157 | && known_eq (bitpos, 0) |
19158 | && !offset ? 2 : 1, |
19159 | context); |
19160 | /* TODO: We can extract value of the small expression via shifting even |
19161 | for nonzero bitpos. */ |
19162 | if (list_ret == 0) |
19163 | return 0; |
19164 | if (!multiple_p (a: bitpos, BITS_PER_UNIT, multiple: &bytepos) |
19165 | || !multiple_p (a: bitsize, BITS_PER_UNIT)) |
19166 | { |
19167 | expansion_failed (expr: loc, NULL_RTX, |
19168 | reason: "bitfield access" ); |
19169 | return 0; |
19170 | } |
19171 | |
19172 | if (offset != NULL_TREE) |
19173 | { |
19174 | /* Variable offset. */ |
19175 | list_ret1 = loc_list_from_tree_1 (loc: offset, want_address: 0, context); |
19176 | if (list_ret1 == 0) |
19177 | return 0; |
19178 | add_loc_list (ret: &list_ret, list: list_ret1); |
19179 | if (!list_ret) |
19180 | return 0; |
19181 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0)); |
19182 | } |
19183 | |
19184 | HOST_WIDE_INT value; |
19185 | if (bytepos.is_constant (const_value: &value) && value > 0) |
19186 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_plus_uconst, |
19187 | oprnd1: value, oprnd2: 0)); |
19188 | else if (maybe_ne (a: bytepos, b: 0)) |
19189 | loc_list_plus_const (list_head: list_ret, offset: bytepos); |
19190 | |
19191 | have_address = 1; |
19192 | break; |
19193 | } |
19194 | |
19195 | case INTEGER_CST: |
19196 | if ((want_address || !tree_fits_shwi_p (loc)) |
19197 | && (ret = cst_pool_loc_descr (loc))) |
19198 | have_address = 1; |
19199 | else if (want_address == 2 |
19200 | && tree_fits_shwi_p (loc) |
19201 | && (ret = address_of_int_loc_descriptor |
19202 | (size: int_size_in_bytes (TREE_TYPE (loc)), |
19203 | i: tree_to_shwi (loc)))) |
19204 | have_address = 1; |
19205 | else if (tree_fits_shwi_p (loc)) |
19206 | ret = int_loc_descriptor (poly_i: tree_to_shwi (loc)); |
19207 | else if (tree_fits_uhwi_p (loc)) |
19208 | ret = uint_loc_descriptor (i: tree_to_uhwi (loc)); |
19209 | else |
19210 | { |
19211 | expansion_failed (expr: loc, NULL_RTX, |
19212 | reason: "Integer operand is not host integer" ); |
19213 | return 0; |
19214 | } |
19215 | break; |
19216 | |
19217 | case POLY_INT_CST: |
19218 | { |
19219 | if (want_address) |
19220 | { |
19221 | expansion_failed (expr: loc, NULL_RTX, |
19222 | reason: "constant address with a runtime component" ); |
19223 | return 0; |
19224 | } |
19225 | poly_int64 value; |
19226 | if (!poly_int_tree_p (t: loc, value: &value)) |
19227 | { |
19228 | expansion_failed (expr: loc, NULL_RTX, reason: "constant too big" ); |
19229 | return 0; |
19230 | } |
19231 | ret = int_loc_descriptor (poly_i: value); |
19232 | } |
19233 | break; |
19234 | |
19235 | case CONSTRUCTOR: |
19236 | case REAL_CST: |
19237 | case STRING_CST: |
19238 | case COMPLEX_CST: |
19239 | if ((ret = cst_pool_loc_descr (loc))) |
19240 | have_address = 1; |
19241 | else if (TREE_CODE (loc) == CONSTRUCTOR) |
19242 | { |
19243 | tree type = TREE_TYPE (loc); |
19244 | unsigned HOST_WIDE_INT size = int_size_in_bytes (type); |
19245 | unsigned HOST_WIDE_INT offset = 0; |
19246 | unsigned HOST_WIDE_INT cnt; |
19247 | constructor_elt *ce; |
19248 | |
19249 | if (TREE_CODE (type) == RECORD_TYPE) |
19250 | { |
19251 | /* This is very limited, but it's enough to output |
19252 | pointers to member functions, as long as the |
19253 | referenced function is defined in the current |
19254 | translation unit. */ |
19255 | FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce) |
19256 | { |
19257 | tree val = ce->value; |
19258 | |
19259 | tree field = ce->index; |
19260 | |
19261 | if (val) |
19262 | STRIP_NOPS (val); |
19263 | |
19264 | if (!field || DECL_BIT_FIELD (field)) |
19265 | { |
19266 | expansion_failed (expr: loc, NULL_RTX, |
19267 | reason: "bitfield in record type constructor" ); |
19268 | size = offset = (unsigned HOST_WIDE_INT)-1; |
19269 | ret = NULL; |
19270 | break; |
19271 | } |
19272 | |
19273 | HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field)); |
19274 | unsigned HOST_WIDE_INT pos = int_byte_position (field); |
19275 | gcc_assert (pos + fieldsize <= size); |
19276 | if (pos < offset) |
19277 | { |
19278 | expansion_failed (expr: loc, NULL_RTX, |
19279 | reason: "out-of-order fields in record constructor" ); |
19280 | size = offset = (unsigned HOST_WIDE_INT)-1; |
19281 | ret = NULL; |
19282 | break; |
19283 | } |
19284 | if (pos > offset) |
19285 | { |
19286 | ret1 = new_loc_descr (op: DW_OP_piece, oprnd1: pos - offset, oprnd2: 0); |
19287 | add_loc_descr (list_head: &ret, descr: ret1); |
19288 | offset = pos; |
19289 | } |
19290 | if (val && fieldsize != 0) |
19291 | { |
19292 | ret1 = loc_descriptor_from_tree (val, want_address, context); |
19293 | if (!ret1) |
19294 | { |
19295 | expansion_failed (expr: loc, NULL_RTX, |
19296 | reason: "unsupported expression in field" ); |
19297 | size = offset = (unsigned HOST_WIDE_INT)-1; |
19298 | ret = NULL; |
19299 | break; |
19300 | } |
19301 | add_loc_descr (list_head: &ret, descr: ret1); |
19302 | } |
19303 | if (fieldsize) |
19304 | { |
19305 | ret1 = new_loc_descr (op: DW_OP_piece, oprnd1: fieldsize, oprnd2: 0); |
19306 | add_loc_descr (list_head: &ret, descr: ret1); |
19307 | offset = pos + fieldsize; |
19308 | } |
19309 | } |
19310 | |
19311 | if (offset != size) |
19312 | { |
19313 | ret1 = new_loc_descr (op: DW_OP_piece, oprnd1: size - offset, oprnd2: 0); |
19314 | add_loc_descr (list_head: &ret, descr: ret1); |
19315 | offset = size; |
19316 | } |
19317 | |
19318 | have_address = !!want_address; |
19319 | } |
19320 | else |
19321 | expansion_failed (expr: loc, NULL_RTX, |
19322 | reason: "constructor of non-record type" ); |
19323 | } |
19324 | else |
19325 | /* We can construct small constants here using int_loc_descriptor. */ |
19326 | expansion_failed (expr: loc, NULL_RTX, |
19327 | reason: "constructor or constant not in constant pool" ); |
19328 | break; |
19329 | |
19330 | case TRUTH_AND_EXPR: |
19331 | case TRUTH_ANDIF_EXPR: |
19332 | case BIT_AND_EXPR: |
19333 | op = DW_OP_and; |
19334 | goto do_binop; |
19335 | |
19336 | case TRUTH_XOR_EXPR: |
19337 | case BIT_XOR_EXPR: |
19338 | op = DW_OP_xor; |
19339 | goto do_binop; |
19340 | |
19341 | case TRUTH_OR_EXPR: |
19342 | case TRUTH_ORIF_EXPR: |
19343 | case BIT_IOR_EXPR: |
19344 | op = DW_OP_or; |
19345 | goto do_binop; |
19346 | |
19347 | case EXACT_DIV_EXPR: |
19348 | case FLOOR_DIV_EXPR: |
19349 | case TRUNC_DIV_EXPR: |
19350 | /* Turn a divide by a power of 2 into a shift when possible. */ |
19351 | if (TYPE_UNSIGNED (TREE_TYPE (loc)) |
19352 | && tree_fits_uhwi_p (TREE_OPERAND (loc, 1))) |
19353 | { |
19354 | const int log2 = exact_log2 (x: tree_to_uhwi (TREE_OPERAND (loc, 1))); |
19355 | if (log2 > 0) |
19356 | { |
19357 | list_ret |
19358 | = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address: 0, context); |
19359 | if (list_ret == 0) |
19360 | return 0; |
19361 | |
19362 | add_loc_descr_to_each (list: list_ret, ref: uint_loc_descriptor (i: log2)); |
19363 | add_loc_descr_to_each (list: list_ret, |
19364 | ref: new_loc_descr (op: DW_OP_shr, oprnd1: 0, oprnd2: 0)); |
19365 | break; |
19366 | } |
19367 | } |
19368 | |
19369 | /* fall through */ |
19370 | |
19371 | case CEIL_DIV_EXPR: |
19372 | case ROUND_DIV_EXPR: |
19373 | if (TYPE_UNSIGNED (TREE_TYPE (loc))) |
19374 | { |
19375 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (loc)); |
19376 | scalar_int_mode int_mode; |
19377 | |
19378 | if ((dwarf_strict && dwarf_version < 5) |
19379 | || !is_a <scalar_int_mode> (m: mode, result: &int_mode)) |
19380 | return 0; |
19381 | |
19382 | /* We can use a signed divide if the sign bit is not set. */ |
19383 | if (GET_MODE_SIZE (mode: int_mode) < DWARF2_ADDR_SIZE) |
19384 | { |
19385 | op = DW_OP_div; |
19386 | goto do_binop; |
19387 | } |
19388 | |
19389 | list_ret = typed_binop_from_tree (op: DW_OP_div, loc, |
19390 | type_die: base_type_for_mode (mode: int_mode, unsignedp: 1), |
19391 | mode: int_mode, context); |
19392 | break; |
19393 | } |
19394 | op = DW_OP_div; |
19395 | goto do_binop; |
19396 | |
19397 | case MINUS_EXPR: |
19398 | op = DW_OP_minus; |
19399 | goto do_binop; |
19400 | |
19401 | case FLOOR_MOD_EXPR: |
19402 | case CEIL_MOD_EXPR: |
19403 | case ROUND_MOD_EXPR: |
19404 | case TRUNC_MOD_EXPR: |
19405 | if (TYPE_UNSIGNED (TREE_TYPE (loc))) |
19406 | { |
19407 | op = DW_OP_mod; |
19408 | goto do_binop; |
19409 | } |
19410 | list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address: 0, context); |
19411 | list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address: 0, context); |
19412 | if (list_ret == 0 || list_ret1 == 0) |
19413 | return 0; |
19414 | |
19415 | add_loc_list (ret: &list_ret, list: list_ret1); |
19416 | if (list_ret == 0) |
19417 | return 0; |
19418 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_over, oprnd1: 0, oprnd2: 0)); |
19419 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_over, oprnd1: 0, oprnd2: 0)); |
19420 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_div, oprnd1: 0, oprnd2: 0)); |
19421 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_mul, oprnd1: 0, oprnd2: 0)); |
19422 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_minus, oprnd1: 0, oprnd2: 0)); |
19423 | break; |
19424 | |
19425 | case MULT_EXPR: |
19426 | op = DW_OP_mul; |
19427 | goto do_binop; |
19428 | |
19429 | case LSHIFT_EXPR: |
19430 | op = DW_OP_shl; |
19431 | goto do_binop; |
19432 | |
19433 | case RSHIFT_EXPR: |
19434 | op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra); |
19435 | goto do_binop; |
19436 | |
19437 | case POINTER_PLUS_EXPR: |
19438 | case PLUS_EXPR: |
19439 | do_plus: |
19440 | if (tree_fits_shwi_p (TREE_OPERAND (loc, 1))) |
19441 | { |
19442 | /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be |
19443 | smarter to encode their opposite. The DW_OP_plus_uconst operation |
19444 | takes 1 + X bytes, X being the size of the ULEB128 addend. On the |
19445 | other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y |
19446 | bytes, Y being the size of the operation that pushes the opposite |
19447 | of the addend. So let's choose the smallest representation. */ |
19448 | const tree tree_addend = TREE_OPERAND (loc, 1); |
19449 | offset_int wi_addend; |
19450 | HOST_WIDE_INT shwi_addend; |
19451 | dw_loc_descr_ref loc_naddend; |
19452 | |
19453 | list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address: 0, context); |
19454 | if (list_ret == 0) |
19455 | return 0; |
19456 | |
19457 | /* Try to get the literal to push. It is the opposite of the addend, |
19458 | so as we rely on wrapping during DWARF evaluation, first decode |
19459 | the literal as a "DWARF-sized" signed number. */ |
19460 | wi_addend = wi::to_offset (t: tree_addend); |
19461 | wi_addend = wi::sext (x: wi_addend, DWARF2_ADDR_SIZE * 8); |
19462 | shwi_addend = wi_addend.to_shwi (); |
19463 | loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT)) |
19464 | ? int_loc_descriptor (poly_i: -shwi_addend) |
19465 | : NULL; |
19466 | |
19467 | if (loc_naddend != NULL |
19468 | && ((unsigned) size_of_uleb128 (shwi_addend) |
19469 | > size_of_loc_descr (loc: loc_naddend))) |
19470 | { |
19471 | add_loc_descr_to_each (list: list_ret, ref: loc_naddend); |
19472 | add_loc_descr_to_each (list: list_ret, |
19473 | ref: new_loc_descr (op: DW_OP_minus, oprnd1: 0, oprnd2: 0)); |
19474 | } |
19475 | else |
19476 | { |
19477 | for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; ) |
19478 | { |
19479 | loc_naddend = loc_cur; |
19480 | loc_cur = loc_cur->dw_loc_next; |
19481 | ggc_free (loc_naddend); |
19482 | } |
19483 | loc_list_plus_const (list_head: list_ret, offset: wi_addend.to_shwi ()); |
19484 | } |
19485 | break; |
19486 | } |
19487 | |
19488 | op = DW_OP_plus; |
19489 | goto do_binop; |
19490 | |
19491 | case LE_EXPR: |
19492 | op = DW_OP_le; |
19493 | goto do_comp_binop; |
19494 | |
19495 | case GE_EXPR: |
19496 | op = DW_OP_ge; |
19497 | goto do_comp_binop; |
19498 | |
19499 | case LT_EXPR: |
19500 | op = DW_OP_lt; |
19501 | goto do_comp_binop; |
19502 | |
19503 | case GT_EXPR: |
19504 | op = DW_OP_gt; |
19505 | goto do_comp_binop; |
19506 | |
19507 | do_comp_binop: |
19508 | if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0)))) |
19509 | { |
19510 | list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context); |
19511 | list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context); |
19512 | list_ret = loc_list_from_uint_comparison (left: list_ret, right: list_ret1, |
19513 | TREE_CODE (loc)); |
19514 | break; |
19515 | } |
19516 | else |
19517 | goto do_binop; |
19518 | |
19519 | case EQ_EXPR: |
19520 | op = DW_OP_eq; |
19521 | goto do_binop; |
19522 | |
19523 | case NE_EXPR: |
19524 | op = DW_OP_ne; |
19525 | goto do_binop; |
19526 | |
19527 | do_binop: |
19528 | list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address: 0, context); |
19529 | list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address: 0, context); |
19530 | if (list_ret == 0 || list_ret1 == 0) |
19531 | return 0; |
19532 | |
19533 | add_loc_list (ret: &list_ret, list: list_ret1); |
19534 | if (list_ret == 0) |
19535 | return 0; |
19536 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
19537 | break; |
19538 | |
19539 | case TRUTH_NOT_EXPR: |
19540 | list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address: 0, context); |
19541 | if (list_ret == 0) |
19542 | return 0; |
19543 | |
19544 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_lit0, oprnd1: 0, oprnd2: 0)); |
19545 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op: DW_OP_eq, oprnd1: 0, oprnd2: 0)); |
19546 | break; |
19547 | |
19548 | case BIT_NOT_EXPR: |
19549 | op = DW_OP_not; |
19550 | goto do_unop; |
19551 | |
19552 | case ABS_EXPR: |
19553 | op = DW_OP_abs; |
19554 | goto do_unop; |
19555 | |
19556 | case NEGATE_EXPR: |
19557 | op = DW_OP_neg; |
19558 | goto do_unop; |
19559 | |
19560 | do_unop: |
19561 | list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address: 0, context); |
19562 | if (list_ret == 0) |
19563 | return 0; |
19564 | |
19565 | add_loc_descr_to_each (list: list_ret, ref: new_loc_descr (op, oprnd1: 0, oprnd2: 0)); |
19566 | break; |
19567 | |
19568 | case MIN_EXPR: |
19569 | case MAX_EXPR: |
19570 | { |
19571 | const enum tree_code code = |
19572 | TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR; |
19573 | |
19574 | loc = build3 (COND_EXPR, TREE_TYPE (loc), |
19575 | build2 (code, integer_type_node, |
19576 | TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)), |
19577 | TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0)); |
19578 | } |
19579 | |
19580 | /* fall through */ |
19581 | |
19582 | case COND_EXPR: |
19583 | { |
19584 | dw_loc_descr_ref lhs |
19585 | = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context); |
19586 | dw_loc_list_ref rhs |
19587 | = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), want_address: 0, context); |
19588 | dw_loc_descr_ref bra_node, jump_node, tmp; |
19589 | |
19590 | /* DW_OP_bra is branch-on-nonzero so avoid doing useless work. */ |
19591 | if (TREE_CODE (TREE_OPERAND (loc, 0)) == NE_EXPR |
19592 | && integer_zerop (TREE_OPERAND (TREE_OPERAND (loc, 0), 1))) |
19593 | list_ret |
19594 | = loc_list_from_tree_1 (TREE_OPERAND (TREE_OPERAND (loc, 0), 0), |
19595 | want_address: 0, context); |
19596 | /* Likewise, swap the operands for a logically negated condition. */ |
19597 | else if (TREE_CODE (TREE_OPERAND (loc, 0)) == TRUTH_NOT_EXPR) |
19598 | { |
19599 | lhs = loc_descriptor_from_tree (TREE_OPERAND (loc, 2), 0, context); |
19600 | rhs = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address: 0, context); |
19601 | list_ret |
19602 | = loc_list_from_tree_1 (TREE_OPERAND (TREE_OPERAND (loc, 0), 0), |
19603 | want_address: 0, context); |
19604 | } |
19605 | else |
19606 | list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address: 0, context); |
19607 | if (list_ret == 0 || lhs == 0 || rhs == 0) |
19608 | return 0; |
19609 | |
19610 | bra_node = new_loc_descr (op: DW_OP_bra, oprnd1: 0, oprnd2: 0); |
19611 | add_loc_descr_to_each (list: list_ret, ref: bra_node); |
19612 | |
19613 | add_loc_list (ret: &list_ret, list: rhs); |
19614 | jump_node = new_loc_descr (op: DW_OP_skip, oprnd1: 0, oprnd2: 0); |
19615 | add_loc_descr_to_each (list: list_ret, ref: jump_node); |
19616 | |
19617 | add_loc_descr_to_each (list: list_ret, ref: lhs); |
19618 | bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc; |
19619 | bra_node->dw_loc_oprnd1.v.val_loc = lhs; |
19620 | |
19621 | /* ??? Need a node to point the skip at. Use a nop. */ |
19622 | tmp = new_loc_descr (op: DW_OP_nop, oprnd1: 0, oprnd2: 0); |
19623 | add_loc_descr_to_each (list: list_ret, ref: tmp); |
19624 | jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc; |
19625 | jump_node->dw_loc_oprnd1.v.val_loc = tmp; |
19626 | } |
19627 | break; |
19628 | |
19629 | case FIX_TRUNC_EXPR: |
19630 | return 0; |
19631 | |
19632 | case COMPOUND_LITERAL_EXPR: |
19633 | return loc_list_from_tree_1 (COMPOUND_LITERAL_EXPR_DECL (loc), |
19634 | want_address: 0, context); |
19635 | |
19636 | default: |
19637 | /* Leave front-end specific codes as simply unknown. This comes |
19638 | up, for instance, with the C STMT_EXPR. */ |
19639 | if ((unsigned int) TREE_CODE (loc) |
19640 | >= (unsigned int) LAST_AND_UNUSED_TREE_CODE) |
19641 | { |
19642 | expansion_failed (expr: loc, NULL_RTX, |
19643 | reason: "language specific tree node" ); |
19644 | return 0; |
19645 | } |
19646 | |
19647 | /* Otherwise this is a generic code; we should just lists all of |
19648 | these explicitly. We forgot one. */ |
19649 | if (flag_checking) |
19650 | gcc_unreachable (); |
19651 | |
19652 | /* In a release build, we want to degrade gracefully: better to |
19653 | generate incomplete debugging information than to crash. */ |
19654 | return NULL; |
19655 | } |
19656 | |
19657 | if (!ret && !list_ret) |
19658 | return 0; |
19659 | |
19660 | if (want_address == 2 && !have_address |
19661 | && (dwarf_version >= 4 || !dwarf_strict)) |
19662 | { |
19663 | if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE) |
19664 | { |
19665 | expansion_failed (expr: loc, NULL_RTX, |
19666 | reason: "DWARF address size mismatch" ); |
19667 | return 0; |
19668 | } |
19669 | if (ret) |
19670 | add_loc_descr (list_head: &ret, descr: new_loc_descr (op: DW_OP_stack_value, oprnd1: 0, oprnd2: 0)); |
19671 | else |
19672 | add_loc_descr_to_each (list: list_ret, |
19673 | ref: new_loc_descr (op: DW_OP_stack_value, oprnd1: 0, oprnd2: 0)); |
19674 | have_address = 1; |
19675 | } |
19676 | /* Show if we can't fill the request for an address. */ |
19677 | if (want_address && !have_address) |
19678 | { |
19679 | expansion_failed (expr: loc, NULL_RTX, |
19680 | reason: "Want address and only have value" ); |
19681 | return 0; |
19682 | } |
19683 | |
19684 | gcc_assert (!ret || !list_ret); |
19685 | |
19686 | /* If we've got an address and don't want one, dereference. */ |
19687 | if (!want_address && have_address) |
19688 | { |
19689 | HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc)); |
19690 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (loc)); |
19691 | scalar_int_mode int_mode; |
19692 | dw_die_ref type_die; |
19693 | dw_loc_descr_ref deref; |
19694 | |
19695 | /* If the size is greater than DWARF2_ADDR_SIZE, bail out. */ |
19696 | if (size > DWARF2_ADDR_SIZE || size == -1) |
19697 | { |
19698 | expansion_failed (expr: loc, NULL_RTX, |
19699 | reason: "DWARF address size mismatch" ); |
19700 | return 0; |
19701 | } |
19702 | |
19703 | /* If it is equal to DWARF2_ADDR_SIZE, extension does not matter. */ |
19704 | else if (size == DWARF2_ADDR_SIZE) |
19705 | deref = new_loc_descr (op: DW_OP_deref, oprnd1: size, oprnd2: 0); |
19706 | |
19707 | /* If it is lower than DWARF2_ADDR_SIZE, DW_OP_deref_size will zero- |
19708 | extend the value, which is really OK for unsigned types only. */ |
19709 | else if (!(context && context->strict_signedness) |
19710 | || TYPE_UNSIGNED (TREE_TYPE (loc)) |
19711 | || (dwarf_strict && dwarf_version < 5) |
19712 | || !is_a <scalar_int_mode> (m: mode, result: &int_mode) |
19713 | || !(type_die = base_type_for_mode (mode, unsignedp: false))) |
19714 | deref = new_loc_descr (op: DW_OP_deref_size, oprnd1: size, oprnd2: 0); |
19715 | |
19716 | /* Use DW_OP_deref_type for signed integral types if possible, but |
19717 | convert back to the generic type to avoid type mismatches later. */ |
19718 | else |
19719 | { |
19720 | deref = new_loc_descr (op: dwarf_OP (op: DW_OP_deref_type), oprnd1: size, oprnd2: 0); |
19721 | deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref; |
19722 | deref->dw_loc_oprnd2.v.val_die_ref.die = type_die; |
19723 | deref->dw_loc_oprnd2.v.val_die_ref.external = 0; |
19724 | add_loc_descr (list_head: &deref, |
19725 | descr: new_loc_descr (op: dwarf_OP (op: DW_OP_convert), oprnd1: 0, oprnd2: 0)); |
19726 | } |
19727 | |
19728 | if (ret) |
19729 | add_loc_descr (list_head: &ret, descr: deref); |
19730 | else |
19731 | add_loc_descr_to_each (list: list_ret, ref: deref); |
19732 | } |
19733 | |
19734 | if (ret) |
19735 | list_ret = new_loc_list (expr: ret, NULL, vbegin: 0, NULL, vend: 0, NULL); |
19736 | |
19737 | return list_ret; |
19738 | } |
19739 | |
19740 | /* Likewise, but strip useless DW_OP_nop operations in the resulting |
19741 | expressions. */ |
19742 | |
19743 | static dw_loc_list_ref |
19744 | loc_list_from_tree (tree loc, int want_address, |
19745 | struct loc_descr_context *context) |
19746 | { |
19747 | dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context); |
19748 | |
19749 | for (dw_loc_list_ref loc_cur = result; |
19750 | loc_cur != NULL; loc_cur = loc_cur->dw_loc_next) |
19751 | loc_descr_without_nops (loc&: loc_cur->expr); |
19752 | return result; |
19753 | } |
19754 | |
19755 | /* Same as above but return only single location expression. */ |
19756 | static dw_loc_descr_ref |
19757 | loc_descriptor_from_tree (tree loc, int want_address, |
19758 | struct loc_descr_context *context) |
19759 | { |
19760 | dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context); |
19761 | if (!ret) |
19762 | return NULL; |
19763 | if (ret->dw_loc_next) |
19764 | { |
19765 | expansion_failed (expr: loc, NULL_RTX, |
19766 | reason: "Location list where only loc descriptor needed" ); |
19767 | return NULL; |
19768 | } |
19769 | return ret->expr; |
19770 | } |
19771 | |
19772 | /* Given a pointer to what is assumed to be a FIELD_DECL node, return a |
19773 | pointer to the declared type for the relevant field variable, or return |
19774 | `integer_type_node' if the given node turns out to be an |
19775 | ERROR_MARK node. */ |
19776 | |
19777 | static inline tree |
19778 | field_type (const_tree decl) |
19779 | { |
19780 | tree type; |
19781 | |
19782 | if (TREE_CODE (decl) == ERROR_MARK) |
19783 | return integer_type_node; |
19784 | |
19785 | type = DECL_BIT_FIELD_TYPE (decl); |
19786 | if (type == NULL_TREE) |
19787 | type = TREE_TYPE (decl); |
19788 | |
19789 | return type; |
19790 | } |
19791 | |
19792 | /* Given a pointer to a tree node, return the alignment in bits for |
19793 | it, or else return BITS_PER_WORD if the node actually turns out to |
19794 | be an ERROR_MARK node. */ |
19795 | |
19796 | static inline unsigned |
19797 | simple_type_align_in_bits (const_tree type) |
19798 | { |
19799 | return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD; |
19800 | } |
19801 | |
19802 | static inline unsigned |
19803 | simple_decl_align_in_bits (const_tree decl) |
19804 | { |
19805 | return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD; |
19806 | } |
19807 | |
19808 | /* Return the result of rounding T up to ALIGN. */ |
19809 | |
19810 | static inline offset_int |
19811 | round_up_to_align (const offset_int &t, unsigned int align) |
19812 | { |
19813 | return wi::udiv_trunc (x: t + align - 1, y: align) * align; |
19814 | } |
19815 | |
/* Helper structure for RECORD_TYPE processing: carries the context needed
   to compute member positions while walking (possibly nested) record
   layouts.  */
struct vlr_context
{
  /* Root RECORD_TYPE.  It is needed to generate data member location
     descriptions in variable-length records (VLR), but also to cope with
     variants, which are composed of nested structures multiplexed with
     QUAL_UNION_TYPE nodes.  Each time such a structure is passed to a
     function processing a FIELD_DECL, it is required to be non null.  */
  tree struct_type;

  /* When generating a variant part in a RECORD_TYPE (i.e. a nested
     QUAL_UNION_TYPE), this holds an expression that computes the offset for
     this variant part as part of the root record (in storage units).  For
     regular records, it must be NULL_TREE.  */
  tree variant_part_offset;
};
19832 | |
19833 | /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest |
19834 | addressed byte of the "containing object" for the given FIELD_DECL. If |
19835 | possible, return a native constant through CST_OFFSET (in which case NULL is |
19836 | returned); otherwise return a DWARF expression that computes the offset. |
19837 | |
19838 | Set *CST_OFFSET to 0 and return NULL if we are unable to determine what |
19839 | that offset is, either because the argument turns out to be a pointer to an |
19840 | ERROR_MARK node, or because the offset expression is too complex for us. |
19841 | |
19842 | CTX is required: see the comment for VLR_CONTEXT. */ |
19843 | |
static dw_loc_descr_ref
field_byte_offset (const_tree decl, struct vlr_context *ctx,
		   HOST_WIDE_INT *cst_offset)
{
  tree tree_result;
  dw_loc_list_ref loc_result;

  /* Default to "no constant offset" so early exits are consistent.  */
  *cst_offset = 0;

  if (TREE_CODE (decl) == ERROR_MARK)
    return NULL;
  else
    gcc_assert (TREE_CODE (decl) == FIELD_DECL);

  /* We cannot handle variable bit offsets at the moment, so abort if it's the
     case.  */
  if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
    return NULL;

  /* We used to handle only constant offsets in all cases.  Now, we handle
     properly dynamic byte offsets only when PCC bitfield type doesn't
     matter.  */
  if (PCC_BITFIELD_TYPE_MATTERS
      && DECL_BIT_FIELD_TYPE (decl)
      && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
    {
      offset_int object_offset_in_bits;
      offset_int object_offset_in_bytes;
      offset_int bitpos_int;
      tree type;
      tree field_size_tree;
      offset_int deepest_bitpos;
      offset_int field_size_in_bits;
      unsigned int type_align_in_bits;
      unsigned int decl_align_in_bits;
      offset_int type_size_in_bits;

      bitpos_int = wi::to_offset (t: bit_position (decl));
      type = field_type (decl);
      type_size_in_bits = offset_int_type_size_in_bits (type);
      type_align_in_bits = simple_type_align_in_bits (type);

      field_size_tree = DECL_SIZE (decl);

      /* The size could be unspecified if there was an error, or for
	 a flexible array member.  */
      if (!field_size_tree)
	field_size_tree = bitsize_zero_node;

      /* If the size of the field is not constant, use the type size.  */
      if (TREE_CODE (field_size_tree) == INTEGER_CST)
	field_size_in_bits = wi::to_offset (t: field_size_tree);
      else
	field_size_in_bits = type_size_in_bits;

      decl_align_in_bits = simple_decl_align_in_bits (decl);

      /* The GCC front-end doesn't make any attempt to keep track of the
	 starting bit offset (relative to the start of the containing
	 structure type) of the hypothetical "containing object" for a
	 bit-field.  Thus, when computing the byte offset value for the
	 start of the "containing object" of a bit-field, we must deduce
	 this information on our own.  This can be rather tricky to do in
	 some cases.  For example, handling the following structure type
	 definition when compiling for an i386/i486 target (which only
	 aligns long long's to 32-bit boundaries) can be very tricky:

	 struct S { int field1; long long field2:31; };

	 Fortunately, there is a simple rule-of-thumb which can be used
	 in such cases.  When compiling for an i386/i486, GCC will
	 allocate 8 bytes for the structure shown above.  It decides to
	 do this based upon one simple rule for bit-field allocation.
	 GCC allocates each "containing object" for each bit-field at
	 the first (i.e. lowest addressed) legitimate alignment boundary
	 (based upon the required minimum alignment for the declared
	 type of the field) which it can possibly use, subject to the
	 condition that there is still enough available space remaining
	 in the containing object (when allocated at the selected point)
	 to fully accommodate all of the bits of the bit-field itself.

	 This simple rule makes it obvious why GCC allocates 8 bytes for
	 each object of the structure type shown above.  When looking
	 for a place to allocate the "containing object" for `field2',
	 the compiler simply tries to allocate a 64-bit "containing
	 object" at each successive 32-bit boundary (starting at zero)
	 until it finds a place to allocate that 64- bit field such that
	 at least 31 contiguous (and previously unallocated) bits remain
	 within that selected 64 bit field.  (As it turns out, for the
	 example above, the compiler finds it is OK to allocate the
	 "containing object" 64-bit field at bit-offset zero within the
	 structure type.)

	 Here we attempt to work backwards from the limited set of facts
	 we're given, and we try to deduce from those facts, where GCC
	 must have believed that the containing object started (within
	 the structure type).  The value we deduce is then used (by the
	 callers of this routine) to generate DW_AT_location and
	 DW_AT_bit_offset attributes for fields (both bit-fields and, in
	 the case of DW_AT_location, regular fields as well).  */

      /* Figure out the bit-distance from the start of the structure to
	 the "deepest" bit of the bit-field.  */
      deepest_bitpos = bitpos_int + field_size_in_bits;

      /* This is the tricky part.  Use some fancy footwork to deduce
	 where the lowest addressed bit of the containing object must
	 be.  */
      object_offset_in_bits = deepest_bitpos - type_size_in_bits;

      /* Round up to type_align by default.  This works best for
	 bitfields.  */
      object_offset_in_bits
	= round_up_to_align (t: object_offset_in_bits, align: type_align_in_bits);

      /* If rounding to the type alignment pushed the containing object past
	 the field's own position, that deduction was wrong: retry with the
	 (typically smaller) declaration alignment instead.  */
      if (wi::gtu_p (x: object_offset_in_bits, y: bitpos_int))
	{
	  object_offset_in_bits = deepest_bitpos - type_size_in_bits;

	  /* Round up to decl_align instead.  */
	  object_offset_in_bits
	    = round_up_to_align (t: object_offset_in_bits, align: decl_align_in_bits);
	}

      /* Convert the deduced bit offset of the containing object to bytes.  */
      object_offset_in_bytes
	= wi::lrshift (x: object_offset_in_bits, LOG2_BITS_PER_UNIT);
      if (ctx->variant_part_offset == NULL_TREE)
	{
	  *cst_offset = object_offset_in_bytes.to_shwi ();
	  return NULL;
	}
      tree_result = wide_int_to_tree (sizetype, cst: object_offset_in_bytes);
    }
  else
    tree_result = byte_position (decl);

  /* Inside a variant part, the field's position is relative to the variant;
     add the variant part's own offset to get a root-relative position.  */
  if (ctx->variant_part_offset != NULL_TREE)
    tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
			       ctx->variant_part_offset, tree_result);

  /* If the byte offset is a constant, it's simplier to handle a native
     constant rather than a DWARF expression.  */
  if (TREE_CODE (tree_result) == INTEGER_CST)
    {
      *cst_offset = wi::to_offset (t: tree_result).to_shwi ();
      return NULL;
    }

  /* Build a minimal expression-evaluation context: only the root record
     type is relevant when translating the offset expression.  */
  struct loc_descr_context loc_ctx = {
    .context_type: ctx->struct_type, /* context_type */
    NULL_TREE,		       /* base_decl */
    NULL,		       /* dpi */
    .placeholder_arg: false,		       /* placeholder_arg */
    .placeholder_seen: false,		       /* placeholder_seen */
    .strict_signedness: false		       /* strict_signedness */
  };
  loc_result = loc_list_from_tree (loc: tree_result, want_address: 0, context: &loc_ctx);

  /* We want a DWARF expression: abort if we only have a location list with
     multiple elements.  */
  if (!loc_result || !single_element_loc_list_p (list: loc_result))
    return NULL;
  else
    return loc_result->expr;
}
20009 | |
20010 | /* The following routines define various Dwarf attributes and any data |
20011 | associated with them. */ |
20012 | |
20013 | /* Add a location description attribute value to a DIE. |
20014 | |
20015 | This emits location attributes suitable for whole variables and |
20016 | whole parameters. Note that the location attributes for struct fields are |
20017 | generated by the routine `data_member_location_attribute' below. */ |
20018 | |
20019 | static inline void |
20020 | add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind, |
20021 | dw_loc_list_ref descr) |
20022 | { |
20023 | bool check_no_locviews = true; |
20024 | if (descr == 0) |
20025 | return; |
20026 | if (single_element_loc_list_p (list: descr)) |
20027 | add_AT_loc (die, attr_kind, loc: descr->expr); |
20028 | else |
20029 | { |
20030 | add_AT_loc_list (die, attr_kind, loc_list: descr); |
20031 | gcc_assert (descr->ll_symbol); |
20032 | if (attr_kind == DW_AT_location && descr->vl_symbol |
20033 | && dwarf2out_locviews_in_attribute ()) |
20034 | { |
20035 | add_AT_view_list (die, attr_kind: DW_AT_GNU_locviews); |
20036 | check_no_locviews = false; |
20037 | } |
20038 | } |
20039 | |
20040 | if (check_no_locviews) |
20041 | gcc_assert (!get_AT (die, DW_AT_GNU_locviews)); |
20042 | } |
20043 | |
20044 | /* Add DW_AT_accessibility attribute to DIE if needed. */ |
20045 | |
20046 | static void |
20047 | add_accessibility_attribute (dw_die_ref die, tree decl) |
20048 | { |
20049 | /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type |
20050 | children, otherwise the default is DW_ACCESS_public. In DWARF2 |
20051 | the default has always been DW_ACCESS_public. */ |
20052 | if (TREE_PROTECTED (decl)) |
20053 | add_AT_unsigned (die, attr_kind: DW_AT_accessibility, unsigned_val: DW_ACCESS_protected); |
20054 | else if (TREE_PRIVATE (decl)) |
20055 | { |
20056 | if (dwarf_version == 2 |
20057 | || die->die_parent == NULL |
20058 | || die->die_parent->die_tag != DW_TAG_class_type) |
20059 | add_AT_unsigned (die, attr_kind: DW_AT_accessibility, unsigned_val: DW_ACCESS_private); |
20060 | } |
20061 | else if (dwarf_version > 2 |
20062 | && die->die_parent |
20063 | && die->die_parent->die_tag == DW_TAG_class_type) |
20064 | add_AT_unsigned (die, attr_kind: DW_AT_accessibility, unsigned_val: DW_ACCESS_public); |
20065 | } |
20066 | |
20067 | /* Attach the specialized form of location attribute used for data members of |
20068 | struct and union types. In the special case of a FIELD_DECL node which |
20069 | represents a bit-field, the "offset" part of this special location |
20070 | descriptor must indicate the distance in bytes from the lowest-addressed |
20071 | byte of the containing struct or union type to the lowest-addressed byte of |
20072 | the "containing object" for the bit-field. (See the `field_byte_offset' |
20073 | function above). |
20074 | |
20075 | For any given bit-field, the "containing object" is a hypothetical object |
20076 | (of some integral or enum type) within which the given bit-field lives. The |
20077 | type of this hypothetical "containing object" is always the same as the |
20078 | declared type of the individual bit-field itself (for GCC anyway... the |
20079 | DWARF spec doesn't actually mandate this). Note that it is the size (in |
20080 | bytes) of the hypothetical "containing object" which will be given in the |
20081 | DW_AT_byte_size attribute for this bit-field. (See the |
20082 | `byte_size_attribute' function below.) It is also used when calculating the |
20083 | value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute' |
20084 | function below.) |
20085 | |
20086 | CTX is required: see the comment for VLR_CONTEXT. */ |
20087 | |
static void
add_data_member_location_attribute (dw_die_ref die,
				    tree decl,
				    struct vlr_context *ctx)
{
  HOST_WIDE_INT offset;
  dw_loc_descr_ref loc_descr = 0;

  if (TREE_CODE (decl) == TREE_BINFO)
    {
      /* We're working on the TAG_inheritance for a base class.  */
      if (BINFO_VIRTUAL_P (decl) && is_cxx ())
	{
	  /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
	     aren't at a fixed offset from all (sub)objects of the same
	     type.  We need to extract the appropriate offset from our
	     vtable.  The following dwarf expression means

	       BaseAddr = ObAddr + *((*ObAddr) - Offset)

	     This is specific to the V3 ABI, of course.  */

	  dw_loc_descr_ref tmp;

	  /* Make a copy of the object address.  */
	  tmp = new_loc_descr (op: DW_OP_dup, oprnd1: 0, oprnd2: 0);
	  add_loc_descr (list_head: &loc_descr, descr: tmp);

	  /* Extract the vtable address.  */
	  tmp = new_loc_descr (op: DW_OP_deref, oprnd1: 0, oprnd2: 0);
	  add_loc_descr (list_head: &loc_descr, descr: tmp);

	  /* Calculate the address of the offset.  */
	  offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
	  /* The vcall offset is stored below the vtable address, so it must
	     be negative in this ABI.  */
	  gcc_assert (offset < 0);

	  tmp = int_loc_descriptor (poly_i: -offset);
	  add_loc_descr (list_head: &loc_descr, descr: tmp);
	  tmp = new_loc_descr (op: DW_OP_minus, oprnd1: 0, oprnd2: 0);
	  add_loc_descr (list_head: &loc_descr, descr: tmp);

	  /* Extract the offset.  */
	  tmp = new_loc_descr (op: DW_OP_deref, oprnd1: 0, oprnd2: 0);
	  add_loc_descr (list_head: &loc_descr, descr: tmp);

	  /* Add it to the object address.  */
	  tmp = new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0);
	  add_loc_descr (list_head: &loc_descr, descr: tmp);
	}
      else
	offset = tree_to_shwi (BINFO_OFFSET (decl));
    }
  else
    {
      /* Regular FIELD_DECL: either a constant byte offset (returned through
	 OFFSET) or a DWARF expression for a dynamic one.  */
      loc_descr = field_byte_offset (decl, ctx, cst_offset: &offset);

      if (!loc_descr)
	;

      /* If loc_descr is available, then we know the offset is dynamic.  */
      else if (gnat_encodings == DWARF_GNAT_ENCODINGS_ALL)
	{
	  loc_descr = NULL;
	  offset = 0;
	}

      /* Data member location evaluation starts with the base address on the
	 stack.  Compute the field offset and add it to this base address.  */
      else
	add_loc_descr (list_head: &loc_descr, descr: new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0));
    }

  if (!loc_descr)
    {
      /* While DW_AT_data_bit_offset has been added already in DWARF4,
	 e.g. GDB only added support to it in November 2016.  For DWARF5
	 we need newer debug info consumers anyway.  We might change this
	 to dwarf_version >= 4 once most consumers catched up.  */
      if (dwarf_version >= 5
	  && TREE_CODE (decl) == FIELD_DECL
	  && DECL_BIT_FIELD_TYPE (decl)
	  && (ctx->variant_part_offset == NULL_TREE
	      || TREE_CODE (ctx->variant_part_offset) == INTEGER_CST))
	{
	  tree off = bit_position (decl);
	  if (ctx->variant_part_offset)
	    off = bit_from_pos (ctx->variant_part_offset, off);
	  if (tree_fits_uhwi_p (off) && get_AT (die, attr_kind: DW_AT_bit_size))
	    {
	      /* DW_AT_data_bit_offset supersedes the older byte_size /
		 bit_offset pair, so drop those before adding it.  */
	      remove_AT (die, attr_kind: DW_AT_byte_size);
	      remove_AT (die, attr_kind: DW_AT_bit_offset);
	      add_AT_unsigned (die, attr_kind: DW_AT_data_bit_offset, unsigned_val: tree_to_uhwi (off));
	      return;
	    }
	}
      if (dwarf_version > 2)
	{
	  /* Don't need to output a location expression, just the constant. */
	  if (offset < 0)
	    add_AT_int (die, attr_kind: DW_AT_data_member_location, int_val: offset);
	  else
	    add_AT_unsigned (die, attr_kind: DW_AT_data_member_location, unsigned_val: offset);
	  return;
	}
      else
	{
	  enum dwarf_location_atom op;

	  /* The DWARF2 standard says that we should assume that the structure
	     address is already on the stack, so we can specify a structure
	     field address by using DW_OP_plus_uconst.  */
	  op = DW_OP_plus_uconst;
	  loc_descr = new_loc_descr (op, oprnd1: offset, oprnd2: 0);
	}
    }

  add_AT_loc (die, attr_kind: DW_AT_data_member_location, loc: loc_descr);
}
20206 | |
20207 | /* Writes integer values to dw_vec_const array. */ |
20208 | |
20209 | static void |
20210 | insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest) |
20211 | { |
20212 | while (size != 0) |
20213 | { |
20214 | *dest++ = val & 0xff; |
20215 | val >>= 8; |
20216 | --size; |
20217 | } |
20218 | } |
20219 | |
20220 | /* Reads integers from dw_vec_const array. Inverse of insert_int. */ |
20221 | |
20222 | static HOST_WIDE_INT |
20223 | (const unsigned char *src, unsigned int size) |
20224 | { |
20225 | HOST_WIDE_INT val = 0; |
20226 | |
20227 | src += size; |
20228 | while (size != 0) |
20229 | { |
20230 | val <<= 8; |
20231 | val |= *--src & 0xff; |
20232 | --size; |
20233 | } |
20234 | return val; |
20235 | } |
20236 | |
20237 | /* Writes wide_int values to dw_vec_const array. */ |
20238 | |
20239 | static void |
20240 | insert_wide_int (const wide_int_ref &val, unsigned char *dest, int elt_size) |
20241 | { |
20242 | int i; |
20243 | |
20244 | if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT) |
20245 | { |
20246 | insert_int (val: (HOST_WIDE_INT) val.elt (i: 0), size: elt_size, dest); |
20247 | return; |
20248 | } |
20249 | |
20250 | /* We'd have to extend this code to support odd sizes. */ |
20251 | gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0); |
20252 | |
20253 | int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT); |
20254 | |
20255 | if (WORDS_BIG_ENDIAN) |
20256 | for (i = n - 1; i >= 0; i--) |
20257 | { |
20258 | insert_int (val: (HOST_WIDE_INT) val.elt (i), size: sizeof (HOST_WIDE_INT), dest); |
20259 | dest += sizeof (HOST_WIDE_INT); |
20260 | } |
20261 | else |
20262 | for (i = 0; i < n; i++) |
20263 | { |
20264 | insert_int (val: (HOST_WIDE_INT) val.elt (i), size: sizeof (HOST_WIDE_INT), dest); |
20265 | dest += sizeof (HOST_WIDE_INT); |
20266 | } |
20267 | } |
20268 | |
20269 | /* Writes floating point values to dw_vec_const array. */ |
20270 | |
20271 | static unsigned |
20272 | insert_float (const_rtx rtl, unsigned char *array) |
20273 | { |
20274 | long val[4]; |
20275 | int i; |
20276 | scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl)); |
20277 | |
20278 | real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode); |
20279 | |
20280 | /* real_to_target puts 32-bit pieces in each long. Pack them. */ |
20281 | if (GET_MODE_SIZE (mode) < 4) |
20282 | { |
20283 | gcc_assert (GET_MODE_SIZE (mode) == 2); |
20284 | insert_int (val: val[0], size: 2, dest: array); |
20285 | return 2; |
20286 | } |
20287 | |
20288 | for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++) |
20289 | { |
20290 | insert_int (val: val[i], size: 4, dest: array); |
20291 | array += 4; |
20292 | } |
20293 | return 4; |
20294 | } |
20295 | |
20296 | /* Attach a DW_AT_const_value attribute for a variable or a parameter which |
20297 | does not have a "location" either in memory or in a register. These |
20298 | things can arise in GNU C when a constant is passed as an actual parameter |
20299 | to an inlined function. They can also arise in C++ where declared |
20300 | constants do not necessarily get memory "homes". */ |
20301 | |
static bool
add_const_value_attribute (dw_die_ref die, machine_mode mode, rtx rtl)
{
  scalar_mode int_mode;

  switch (GET_CODE (rtl))
    {
    case CONST_INT:
      {
	HOST_WIDE_INT val = INTVAL (rtl);

	/* Pick the signed or unsigned form so the value round-trips.  */
	if (val < 0)
	  add_AT_int (die, attr_kind: DW_AT_const_value, int_val: val);
	else
	  add_AT_unsigned (die, attr_kind: DW_AT_const_value, unsigned_val: (unsigned HOST_WIDE_INT) val);
      }
      return true;

    case CONST_WIDE_INT:
      /* Only emit a wide-int constant when the precision is a whole number
	 of host wide ints; otherwise punt.  */
      if (is_int_mode (mode, int_mode: &int_mode)
	  && (GET_MODE_PRECISION (mode: int_mode)
	      & (HOST_BITS_PER_WIDE_INT - 1)) == 0)
	{
	  add_AT_wide (die, attr_kind: DW_AT_const_value, w: rtx_mode_t (rtl, int_mode));
	  return true;
	}
      return false;

    case CONST_DOUBLE:
      /* Note that a CONST_DOUBLE rtx could represent either an integer or a
	 floating-point constant.  A CONST_DOUBLE is used whenever the
	 constant requires more than one word in order to be adequately
	 represented.  */
      if (TARGET_SUPPORTS_WIDE_INT == 0
	  && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
	add_AT_double (die, attr_kind: DW_AT_const_value,
		       CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
      else
	{
	  scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
	  unsigned int length = GET_MODE_SIZE (mode);
	  unsigned char *array = ggc_vec_alloc<unsigned char> (c: length);
	  unsigned int elt_size = insert_float (rtl, array);

	  add_AT_vec (die, attr_kind: DW_AT_const_value, length: length / elt_size, elt_size,
		      array);
	}
      return true;

    case CONST_VECTOR:
      {
	unsigned int length;
	if (!CONST_VECTOR_NUNITS (rtl).is_constant (const_value: &length))
	  return false;

	machine_mode mode = GET_MODE (rtl);
	/* The combination of a length and byte elt_size doesn't extend
	   naturally to boolean vectors, where several elements are packed
	   into the same byte.  */
	if (GET_MODE_CLASS (mode) == MODE_VECTOR_BOOL)
	  return false;

	unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
	unsigned char *array
	  = ggc_vec_alloc<unsigned char> (c: length * elt_size);
	unsigned int i;
	unsigned char *p;
	machine_mode imode = GET_MODE_INNER (mode);

	/* Serialize each vector element into the byte array.  */
	switch (GET_MODE_CLASS (mode))
	  {
	  case MODE_VECTOR_INT:
	    for (i = 0, p = array; i < length; i++, p += elt_size)
	      {
		rtx elt = CONST_VECTOR_ELT (rtl, i);
		insert_wide_int (val: rtx_mode_t (elt, imode), dest: p, elt_size);
	      }
	    break;

	  case MODE_VECTOR_FLOAT:
	    for (i = 0, p = array; i < length; i++, p += elt_size)
	      {
		rtx elt = CONST_VECTOR_ELT (rtl, i);
		insert_float (rtl: elt, array: p);
	      }
	    break;

	  default:
	    gcc_unreachable ();
	  }

	add_AT_vec (die, attr_kind: DW_AT_const_value, length, elt_size, array);
      }
      return true;

    case CONST_STRING:
      if (dwarf_version >= 4 || !dwarf_strict)
	{
	  dw_loc_descr_ref loc_result;
	  resolve_one_addr (&rtl);
	/* Shared tail for address-like constants: express the address as a
	   DWARF stack value in DW_AT_location.  Also reached via goto from
	   the SYMBOL_REF / LABEL_REF cases below.  */
	rtl_addr:
	  loc_result = new_addr_loc_descr (addr: rtl, dtprel: dtprel_false);
	  add_loc_descr (list_head: &loc_result, descr: new_loc_descr (op: DW_OP_stack_value, oprnd1: 0, oprnd2: 0));
	  add_AT_loc (die, attr_kind: DW_AT_location, loc: loc_result);
	  vec_safe_push (v&: used_rtx_array, obj: rtl);
	  return true;
	}
      return false;

    case CONST:
      if (CONSTANT_P (XEXP (rtl, 0)))
	return add_const_value_attribute (die, mode, XEXP (rtl, 0));
      /* FALLTHROUGH */
    case SYMBOL_REF:
      if (!const_ok_for_output (rtl))
	return false;
      /* FALLTHROUGH */
    case LABEL_REF:
      if (dwarf_version >= 4 || !dwarf_strict)
	goto rtl_addr;
      return false;

    case PLUS:
      /* In cases where an inlined instance of an inline function is passed
	 the address of an `auto' variable (which is local to the caller) we
	 can get a situation where the DECL_RTL of the artificial local
	 variable (for the inlining) which acts as a stand-in for the
	 corresponding formal parameter (of the inline function) will look
	 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)).  This is not
	 exactly a compile-time constant expression, but it isn't the address
	 of the (artificial) local variable either.  Rather, it represents the
	 *value* which the artificial local variable always has during its
	 lifetime.  We currently have no way to represent such quasi-constant
	 values in Dwarf, so for now we just punt and generate nothing.  */
      return false;

    case HIGH:
    case CONST_FIXED:
    case MINUS:
    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case CONST_POLY_INT:
      return false;

    case MEM:
      /* A read-only BLKmode string in memory can be emitted directly as a
	 string constant.  */
      if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
	  && MEM_READONLY_P (rtl)
	  && GET_MODE (rtl) == BLKmode)
	{
	  add_AT_string (die, attr_kind: DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
	  return true;
	}
      return false;

    default:
      /* No other kinds of rtx should be possible here.  */
      gcc_unreachable ();
    }
}
20461 | |
/* Determine whether the evaluation of EXPR references any variables
   or functions which aren't otherwise used (and therefore may not be
   output).  This is a walk_tree callback: TP is the current node,
   *WALK_SUBTREES is cleared to prune the walk, DATA is unused.
   Returns the offending node (which stops the walk) when a reference
   to a possibly-unemitted entity is found, NULL_TREE otherwise.  */
static tree
reference_to_unused (tree * tp, int * walk_subtrees,
		     void * data ATTRIBUTE_UNUSED)
{
  /* Only expressions and constants can contain the references we care
     about; don't recurse into anything else.  */
  if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
    *walk_subtrees = 0;

  /* A non-public decl that is neither used nor already written out to
     the assembly file will not be emitted at all.  */
  if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
      && ! TREE_ASM_WRITTEN (*tp))
    return *tp;
  /* ??? The C++ FE emits debug information for using decls, so
     putting gcc_unreachable here falls over.  See PR31899.  For now
     be conservative.  */
  else if (!symtab->global_info_ready && VAR_P (*tp))
    return *tp;
  else if (VAR_P (*tp))
    {
      /* A variable with no definition in the varpool will not be
	 output.  */
      varpool_node *node = varpool_node::get (decl: *tp);
      if (!node || !node->definition)
	return *tp;
    }
  else if (TREE_CODE (*tp) == FUNCTION_DECL
	   && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
    {
      /* The call graph machinery must have finished analyzing,
	 optimizing and gimplifying the CU by now.
	 So if *TP has no call graph node associated
	 to it, it means *TP will not be emitted.  */
      if (!symtab->global_info_ready || !cgraph_node::get (decl: *tp))
	return *tp;
    }
  else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
    return *tp;

  return NULL_TREE;
}
20501 | |
/* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
   for use in a later add_const_value_attribute call.  Returns NULL_RTX when
   INIT cannot be represented as an immediate RTL constant.  */

static rtx
rtl_for_decl_init (tree init, tree type)
{
  rtx rtl = NULL_RTX;

  STRIP_NOPS (init);

  /* If a variable is initialized with a string constant without embedded
     zeros, build CONST_STRING.  */
  if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
    {
      tree enttype = TREE_TYPE (type);
      tree domain = TYPE_DOMAIN (type);
      scalar_int_mode mode;

      /* Require a byte-sized integral element, a zero-based constant
	 domain covering exactly the string plus its terminating NUL,
	 and no embedded NULs (strlen must agree with the tree
	 length).  */
      if (is_int_mode (TYPE_MODE (enttype), int_mode: &mode)
	  && GET_MODE_SIZE (mode) == 1
	  && domain
	  && TYPE_MAX_VALUE (domain)
	  && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
	  && integer_zerop (TYPE_MIN_VALUE (domain))
	  && compare_tree_int (TYPE_MAX_VALUE (domain),
			       TREE_STRING_LENGTH (init) - 1) == 0
	  && ((size_t) TREE_STRING_LENGTH (init)
	      == strlen (TREE_STRING_POINTER (init)) + 1))
	{
	  rtl = gen_rtx_CONST_STRING (VOIDmode,
				      ggc_strdup (TREE_STRING_POINTER (init)));
	  rtl = gen_rtx_MEM (BLKmode, rtl);
	  MEM_READONLY_P (rtl) = 1;
	}
    }
  /* Other aggregates, and complex values, could be represented using
     CONCAT: FIXME!
     If this changes, please adjust tree_add_const_value_attribute
     so that for early_dwarf it will for such initializers mangle referenced
     decls.  */
  else if (AGGREGATE_TYPE_P (type)
	   || (TREE_CODE (init) == VIEW_CONVERT_EXPR
	       && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
	   || TREE_CODE (type) == COMPLEX_TYPE)
    ;
  /* Vectors only work if their mode is supported by the target.
     FIXME: generic vectors ought to work too.  */
  else if (TREE_CODE (type) == VECTOR_TYPE
	   && !VECTOR_MODE_P (TYPE_MODE (type)))
    ;
  /* If the initializer is something that we know will expand into an
     immediate RTL constant, expand it now.  We must be careful not to
     reference variables which won't be output.  */
  else if (initializer_constant_valid_p (init, type)
	   && ! walk_tree (&init, reference_to_unused, NULL, NULL))
    {
      /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
	 possible.  */
      if (TREE_CODE (type) == VECTOR_TYPE)
	switch (TREE_CODE (init))
	  {
	  case VECTOR_CST:
	    break;
	  case CONSTRUCTOR:
	    if (TREE_CONSTANT (init))
	      {
		vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
		bool constant_p = true;
		tree value;
		unsigned HOST_WIDE_INT ix;

		/* Even when ctor is constant, it might contain non-*_CST
		   elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
		   belong into VECTOR_CST nodes.  */
		FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
		  if (!CONSTANT_CLASS_P (value))
		    {
		      constant_p = false;
		      break;
		    }

		if (constant_p)
		  {
		    init = build_vector_from_ctor (type, elts);
		    break;
		  }
	      }
	    /* FALLTHRU */

	  default:
	    return NULL;
	  }

      /* Large _BitInt BLKmode INTEGER_CSTs would yield a MEM.  */
      if (TREE_CODE (init) == INTEGER_CST
	  && TREE_CODE (TREE_TYPE (init)) == BITINT_TYPE
	  && TYPE_MODE (TREE_TYPE (init)) == BLKmode)
	{
	  /* Only values that fit a HOST_WIDE_INT can be represented.  */
	  if (tree_fits_shwi_p (init))
	    return GEN_INT (tree_to_shwi (init));
	  else
	    return NULL;
	}

      rtl = expand_expr (exp: init, NULL_RTX, VOIDmode, modifier: EXPAND_INITIALIZER);

      /* If expand_expr returns a MEM, it wasn't immediate.  */
      gcc_assert (!rtl || !MEM_P (rtl));
    }

  return rtl;
}
20614 | |
/* Generate RTL for the variable DECL to represent its location.
   Returns NULL_RTX when no usable location (or constant-value) RTL
   can be produced for DECL.  */

static rtx
rtl_for_decl_location (tree decl)
{
  rtx rtl;

  /* Here we have to decide where we are going to say the parameter "lives"
     (as far as the debugger is concerned).  We only have a couple of
     choices.  GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.

     DECL_RTL normally indicates where the parameter lives during most of the
     activation of the function.  If optimization is enabled however, this
     could be either NULL or else a pseudo-reg.  Both of those cases indicate
     that the parameter doesn't really live anywhere (as far as the code
     generation parts of GCC are concerned) during most of the function's
     activation.  That will happen (for example) if the parameter is never
     referenced within the function.

     We could just generate a location descriptor here for all non-NULL
     non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
     a little nicer than that if we also consider DECL_INCOMING_RTL in cases
     where DECL_RTL is NULL or is a pseudo-reg.

     Note however that we can only get away with using DECL_INCOMING_RTL as
     a backup substitute for DECL_RTL in certain limited cases.  In cases
     where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
     we can be sure that the parameter was passed using the same type as it is
     declared to have within the function, and that its DECL_INCOMING_RTL
     points us to a place where a value of that type is passed.

     In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
     we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
     because in these cases DECL_INCOMING_RTL points us to a value of some
     type which is *different* from the type of the parameter itself.  Thus,
     if we tried to use DECL_INCOMING_RTL to generate a location attribute in
     such cases, the debugger would end up (for example) trying to fetch a
     `float' from a place which actually contains the first part of a
     `double'.  That would lead to really incorrect and confusing
     output at debug-time.

     So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
     in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl).  There
     are a couple of exceptions however.  On little-endian machines we can
     get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
     not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
     an integral type that is smaller than TREE_TYPE (decl).  These cases arise
     when (on a little-endian machine) a non-prototyped function has a
     parameter declared to be of type `short' or `char'.  In such cases,
     TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
     be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
     passed `int' value.  If the debugger then uses that address to fetch
     a `short' or a `char' (on a little-endian machine) the result will be
     the correct data, so we allow for such exceptional cases below.

     Note that our goal here is to describe the place where the given formal
     parameter lives during most of the function's activation (i.e. between the
     end of the prologue and the start of the epilogue).  We'll do that as best
     as we can.  Note however that if the given formal parameter is modified
     sometime during the execution of the function, then a stack backtrace (at
     debug-time) will show the function as having been called with the *new*
     value rather than the value which was originally passed in.  This happens
     rarely enough that it is not a major problem, but it *is* a problem, and
     I'd like to fix it.

     A future version of dwarf2out.cc may generate two additional attributes for
     any given DW_TAG_formal_parameter DIE which will describe the "passed
     type" and the "passed location" for the given formal parameter in addition
     to the attributes we now generate to indicate the "declared type" and the
     "active location" for each parameter.  This additional set of attributes
     could be used by debuggers for stack backtraces.  Separately, note that
     sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
     This happens (for example) for inlined-instances of inline function formal
     parameters which are never referenced.  This really shouldn't be
     happening.  All PARM_DECL nodes should get valid non-NULL
     DECL_INCOMING_RTL values.  FIXME.  */

  /* Use DECL_RTL as the "location" unless we find something better.  */
  rtl = DECL_RTL_IF_SET (decl);

  /* When generating abstract instances, ignore everything except
     constants, symbols living in memory, and symbols living in
     fixed registers.  */
  if (! reload_completed)
    {
      if (rtl
	  && (CONSTANT_P (rtl)
	      || (MEM_P (rtl)
		  && CONSTANT_P (XEXP (rtl, 0)))
	      || (REG_P (rtl)
		  && VAR_P (decl)
		  && TREE_STATIC (decl))))
	{
	  rtl = targetm.delegitimize_address (rtl);
	  return rtl;
	}
      rtl = NULL_RTX;
    }
  else if (TREE_CODE (decl) == PARM_DECL)
    {
      /* See whether DECL_INCOMING_RTL can stand in for a missing or
	 pseudo-reg DECL_RTL, subject to the type compatibility rules
	 described in the big comment above.  */
      if (rtl == NULL_RTX
	  || is_pseudo_reg (rtl)
	  || (MEM_P (rtl)
	      && is_pseudo_reg (XEXP (rtl, 0))
	      && DECL_INCOMING_RTL (decl)
	      && MEM_P (DECL_INCOMING_RTL (decl))
	      && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
	{
	  tree declared_type = TREE_TYPE (decl);
	  tree passed_type = DECL_ARG_TYPE (decl);
	  machine_mode dmode = TYPE_MODE (declared_type);
	  machine_mode pmode = TYPE_MODE (passed_type);

	  /* This decl represents a formal parameter which was optimized out.
	     Note that DECL_INCOMING_RTL may be NULL in here, but we handle
	     all cases where (rtl == NULL_RTX) just below.  */
	  if (dmode == pmode)
	    rtl = DECL_INCOMING_RTL (decl);
	  else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
		   && SCALAR_INT_MODE_P (dmode)
		   && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
		   && DECL_INCOMING_RTL (decl))
	    {
	      rtx inc = DECL_INCOMING_RTL (decl);
	      if (REG_P (inc))
		rtl = inc;
	      else if (MEM_P (inc))
		{
		  /* On big-endian targets the narrower declared value
		     occupies the high-order end of the passed slot, so
		     point at it explicitly.  */
		  if (BYTES_BIG_ENDIAN)
		    rtl = adjust_address_nv (inc, dmode,
					     GET_MODE_SIZE (pmode)
					     - GET_MODE_SIZE (dmode));
		  else
		    rtl = inc;
		}
	    }
	}

      /* If the parm was passed in registers, but lives on the stack, then
	 make a big endian correction if the mode of the type of the
	 parameter is not the same as the mode of the rtl.  */
      /* ??? This is the same series of checks that are made in dbxout.cc before
	 we reach the big endian correction code there.  It isn't clear if all
	 of these checks are necessary here, but keeping them all is the safe
	 thing to do.  */
      else if (MEM_P (rtl)
	       && XEXP (rtl, 0) != const0_rtx
	       && ! CONSTANT_P (XEXP (rtl, 0))
	       /* Not passed in memory.  */
	       && !MEM_P (DECL_INCOMING_RTL (decl))
	       /* Not passed by invisible reference.  */
	       && (!REG_P (XEXP (rtl, 0))
		   || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
		   || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
#if !HARD_FRAME_POINTER_IS_ARG_POINTER
		   || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
#endif
		   )
	       /* Big endian correction check.  */
	       && BYTES_BIG_ENDIAN
	       && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
	       && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
			    UNITS_PER_WORD))
	{
	  machine_mode addr_mode = get_address_mode (mem: rtl);
	  poly_int64 offset = (UNITS_PER_WORD
			       - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));

	  rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
			     plus_constant (addr_mode, XEXP (rtl, 0), offset));
	}
    }
  else if (VAR_P (decl)
	   && rtl
	   && MEM_P (rtl)
	   && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
    {
      machine_mode addr_mode = get_address_mode (mem: rtl);
      poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
					       GET_MODE (rtl));

      /* If a variable is declared "register" yet is smaller than
	 a register, then if we store the variable to memory, it
	 looks like we're storing a register-sized value, when in
	 fact we are not.  We need to adjust the offset of the
	 storage location to reflect the actual value's bytes,
	 else gdb will not be able to display it.  */
      if (maybe_ne (a: offset, b: 0))
	rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
			   plus_constant (addr_mode, XEXP (rtl, 0), offset));
    }

  /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
     and will have been substituted directly into all expressions that use it.
     C does not have such a concept, but C++ and other languages do.  */
  if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
    rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));

  if (rtl)
    rtl = targetm.delegitimize_address (rtl);

  /* If we don't look past the constant pool, we risk emitting a
     reference to a constant pool entry that isn't referenced from
     code, and thus is not emitted.  */
  if (rtl)
    rtl = avoid_constant_pool_reference (rtl);

  /* Try harder to get a rtl.  If this symbol ends up not being emitted
     in the current CU, resolve_addr will remove the expression referencing
     it.  */
  if (rtl == NULL_RTX
      && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
      && VAR_P (decl)
      && !DECL_EXTERNAL (decl)
      && TREE_STATIC (decl)
      && DECL_NAME (decl)
      && !DECL_HARD_REGISTER (decl)
      && DECL_MODE (decl) != VOIDmode)
    {
      /* Build debug-only RTL; only accept it if it is a MEM of the
	 expected SYMBOL_REF for this very decl.  */
      rtl = make_decl_rtl_for_debug (decl);
      if (!MEM_P (rtl)
	  || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
	  || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
	rtl = NULL_RTX;
    }

  return rtl;
}
20843 | |
20844 | /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is |
20845 | returned. If so, the decl for the COMMON block is returned, and the |
20846 | value is the offset into the common block for the symbol. */ |
20847 | |
20848 | static tree |
20849 | fortran_common (tree decl, HOST_WIDE_INT *value) |
20850 | { |
20851 | tree val_expr, cvar; |
20852 | machine_mode mode; |
20853 | poly_int64 bitsize, bitpos; |
20854 | tree offset; |
20855 | HOST_WIDE_INT cbitpos; |
20856 | int unsignedp, reversep, volatilep = 0; |
20857 | |
20858 | /* If the decl isn't a VAR_DECL, or if it isn't static, or if |
20859 | it does not have a value (the offset into the common area), or if it |
20860 | is thread local (as opposed to global) then it isn't common, and shouldn't |
20861 | be handled as such. */ |
20862 | if (!VAR_P (decl) |
20863 | || !TREE_STATIC (decl) |
20864 | || !DECL_HAS_VALUE_EXPR_P (decl) |
20865 | || !is_fortran ()) |
20866 | return NULL_TREE; |
20867 | |
20868 | val_expr = DECL_VALUE_EXPR (decl); |
20869 | if (TREE_CODE (val_expr) != COMPONENT_REF) |
20870 | return NULL_TREE; |
20871 | |
20872 | cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode, |
20873 | &unsignedp, &reversep, &volatilep); |
20874 | |
20875 | if (cvar == NULL_TREE |
20876 | || !VAR_P (cvar) |
20877 | || DECL_ARTIFICIAL (cvar) |
20878 | || !TREE_PUBLIC (cvar) |
20879 | /* We don't expect to have to cope with variable offsets, |
20880 | since at present all static data must have a constant size. */ |
20881 | || !bitpos.is_constant (const_value: &cbitpos)) |
20882 | return NULL_TREE; |
20883 | |
20884 | *value = 0; |
20885 | if (offset != NULL) |
20886 | { |
20887 | if (!tree_fits_shwi_p (offset)) |
20888 | return NULL_TREE; |
20889 | *value = tree_to_shwi (offset); |
20890 | } |
20891 | if (cbitpos != 0) |
20892 | *value += cbitpos / BITS_PER_UNIT; |
20893 | |
20894 | return cvar; |
20895 | } |
20896 | |
/* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
   data attribute for a variable or a parameter.  We generate the
   DW_AT_const_value attribute only in those cases where the given variable
   or parameter does not have a true "location" either in memory or in a
   register.  This can happen (for example) when a constant is passed as an
   actual argument in a call to an inline function.  (It's possible that
   these things can crop up in other ways also.)  Note that one type of
   constant value which can be passed into an inlined function is a constant
   pointer.  This can happen for example if an actual argument in an inlined
   function call evaluates to a compile-time constant address.

   CACHE_P is true if it is worth caching the location list for DECL,
   so that future calls can reuse it rather than regenerate it from scratch.
   This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
   since we will need to refer to them each time the function is inlined.

   Returns true when an attribute was added (or already present),
   false otherwise.  */

static bool
add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
{
  rtx rtl;
  dw_loc_list_ref list;
  var_loc_list *loc_list;
  cached_dw_loc_list *cache;

  /* Location lists are a late-dwarf product; nothing to do early.  */
  if (early_dwarf)
    return false;

  if (TREE_CODE (decl) == ERROR_MARK)
    return false;

  /* Don't duplicate an attribute the DIE already carries.  */
  if (get_AT (die, attr_kind: DW_AT_location)
      || get_AT (die, attr_kind: DW_AT_const_value))
    return true;

  gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  /* Try to get some constant RTL for this decl, and use that as the value of
     the location.  */

  rtl = rtl_for_decl_location (decl);
  if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
      && add_const_value_attribute (die, DECL_MODE (decl), rtl))
    return true;

  /* See if we have single element location list that is equivalent to
     a constant value.  That way we are better to use add_const_value_attribute
     rather than expanding constant value equivalent.  */
  loc_list = lookup_decl_loc (decl);
  if (loc_list
      && loc_list->first
      && loc_list->first->next == NULL
      && NOTE_P (loc_list->first->loc)
      && NOTE_VAR_LOCATION (loc_list->first->loc)
      && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
    {
      struct var_loc_node *node;

      node = loc_list->first;
      rtl = NOTE_VAR_LOCATION_LOC (node->loc);
      /* An EXPR_LIST wraps the actual location; unwrap it.  */
      if (GET_CODE (rtl) == EXPR_LIST)
	rtl = XEXP (rtl, 0);
      if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
	  && add_const_value_attribute (die, DECL_MODE (decl), rtl))
	return true;
    }
  /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
     list several times.  See if we've already cached the contents.  */
  list = NULL;
  if (loc_list == NULL || cached_dw_loc_list_table == NULL)
    cache_p = false;
  if (cache_p)
    {
      cache = cached_dw_loc_list_table->find_with_hash (comparable: decl, DECL_UID (decl));
      if (cache)
	list = cache->loc_list;
    }
  if (list == NULL)
    {
      /* By-reference decls want the address (0); others want the
	 value, allowing a location or value expression (2).  */
      list = loc_list_from_tree (loc: decl, want_address: decl_by_reference_p (decl) ? 0 : 2,
				 NULL);
      /* It is usually worth caching this result if the decl is from
	 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements.  */
      if (cache_p && list && list->dw_loc_next)
	{
	  cached_dw_loc_list **slot
	    = cached_dw_loc_list_table->find_slot_with_hash (comparable: decl,
							     DECL_UID (decl),
							     insert: INSERT);
	  cache = ggc_cleared_alloc<cached_dw_loc_list> ();
	  cache->decl_id = DECL_UID (decl);
	  cache->loc_list = list;
	  *slot = cache;
	}
    }
  if (list)
    {
      add_AT_location_description (die, attr_kind: DW_AT_location, descr: list);
      return true;
    }
  /* None of that worked, so it must not really have a location;
     try adding a constant value attribute from the DECL_INITIAL.  */
  return tree_add_const_value_attribute_for_decl (die, decl);
}
21001 | |
21002 | /* Mangle referenced decls. */ |
21003 | static tree |
21004 | mangle_referenced_decls (tree *tp, int *walk_subtrees, void *) |
21005 | { |
21006 | if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp)) |
21007 | *walk_subtrees = 0; |
21008 | |
21009 | if (VAR_OR_FUNCTION_DECL_P (*tp)) |
21010 | assign_assembler_name_if_needed (*tp); |
21011 | |
21012 | return NULL_TREE; |
21013 | } |
21014 | |
21015 | /* Attach a DW_AT_const_value attribute to DIE. The value of the |
21016 | attribute is the const value T. */ |
21017 | |
21018 | static bool |
21019 | tree_add_const_value_attribute (dw_die_ref die, tree t) |
21020 | { |
21021 | tree init; |
21022 | tree type = TREE_TYPE (t); |
21023 | |
21024 | if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node) |
21025 | return false; |
21026 | |
21027 | init = t; |
21028 | gcc_assert (!DECL_P (init)); |
21029 | |
21030 | if (TREE_CODE (init) == INTEGER_CST) |
21031 | { |
21032 | if (tree_fits_uhwi_p (init)) |
21033 | { |
21034 | add_AT_unsigned (die, attr_kind: DW_AT_const_value, unsigned_val: tree_to_uhwi (init)); |
21035 | return true; |
21036 | } |
21037 | if (tree_fits_shwi_p (init)) |
21038 | { |
21039 | add_AT_int (die, attr_kind: DW_AT_const_value, int_val: tree_to_shwi (init)); |
21040 | return true; |
21041 | } |
21042 | } |
21043 | if (!early_dwarf) |
21044 | { |
21045 | rtx rtl = rtl_for_decl_init (init, type); |
21046 | if (rtl) |
21047 | return add_const_value_attribute (die, TYPE_MODE (type), rtl); |
21048 | } |
21049 | else |
21050 | { |
21051 | /* For early_dwarf force mangling of all referenced symbols. */ |
21052 | tree initializer = init; |
21053 | STRIP_NOPS (initializer); |
21054 | /* rtl_for_decl_init punts on other aggregates, and complex values. */ |
21055 | if (AGGREGATE_TYPE_P (type) |
21056 | || (TREE_CODE (initializer) == VIEW_CONVERT_EXPR |
21057 | && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (initializer, 0)))) |
21058 | || TREE_CODE (type) == COMPLEX_TYPE) |
21059 | ; |
21060 | else if (initializer_constant_valid_p (initializer, type)) |
21061 | walk_tree (&initializer, mangle_referenced_decls, NULL, NULL); |
21062 | } |
21063 | /* If the host and target are sane, try harder. */ |
21064 | if (CHAR_BIT == 8 && BITS_PER_UNIT == 8 |
21065 | && initializer_constant_valid_p (init, type)) |
21066 | { |
21067 | HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init)); |
21068 | if (size > 0 && (int) size == size) |
21069 | { |
21070 | unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (c: size); |
21071 | |
21072 | if (native_encode_initializer (init, array, size) == size) |
21073 | { |
21074 | add_AT_vec (die, attr_kind: DW_AT_const_value, length: size, elt_size: 1, array); |
21075 | return true; |
21076 | } |
21077 | ggc_free (array); |
21078 | } |
21079 | } |
21080 | return false; |
21081 | } |
21082 | |
21083 | /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the |
21084 | attribute is the const value of T, where T is an integral constant |
21085 | variable with static storage duration |
21086 | (so it can't be a PARM_DECL or a RESULT_DECL). */ |
21087 | |
21088 | static bool |
21089 | tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl) |
21090 | { |
21091 | |
21092 | if (!decl |
21093 | || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL) |
21094 | || (VAR_P (decl) && !TREE_STATIC (decl))) |
21095 | return false; |
21096 | |
21097 | if (TREE_READONLY (decl) |
21098 | && ! TREE_THIS_VOLATILE (decl) |
21099 | && DECL_INITIAL (decl)) |
21100 | /* OK */; |
21101 | else |
21102 | return false; |
21103 | |
21104 | /* Don't add DW_AT_const_value if abstract origin already has one. */ |
21105 | if (get_AT (die: var_die, attr_kind: DW_AT_const_value)) |
21106 | return false; |
21107 | |
21108 | return tree_add_const_value_attribute (die: var_die, DECL_INITIAL (decl)); |
21109 | } |
21110 | |
/* Convert the CFI instructions for the current function into a
   location list.  This is used for DW_AT_frame_base when we targeting
   a dwarf2 consumer that does not support the dwarf3
   DW_OP_call_frame_cfa.  OFFSET is a constant to be added to all CFA
   expressions.  Returns the head of the newly built location list.  */

static dw_loc_list_ref
convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
{
  int ix;
  dw_fde_ref fde;
  dw_loc_list_ref list, *list_tail;
  dw_cfi_ref cfi;
  dw_cfa_location last_cfa, next_cfa;
  const char *start_label, *last_label, *section;
  dw_cfa_location remember;

  fde = cfun->fde;
  gcc_assert (fde != NULL);

  section = secname_for_decl (decl: current_function_decl);
  list_tail = &list;
  list = NULL;

  /* Start from an invalid CFA so the first real CFI sets it.  */
  memset (s: &next_cfa, c: 0, n: sizeof (next_cfa));
  next_cfa.reg.set_by_dwreg (INVALID_REGNUM);
  remember = next_cfa;

  start_label = fde->dw_fde_begin;

  /* ??? Bald assumption that the CIE opcode list does not contain
     advance opcodes.  */
  FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
    lookup_cfa_1 (cfi, loc: &next_cfa, remember: &remember);

  last_cfa = next_cfa;
  last_label = start_label;

  if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
    {
      /* If the first partition contained no CFI adjustments, the
	 CIE opcodes apply to the whole first partition.  */
      *list_tail = new_loc_list (expr: build_cfa_loc (cfa: &last_cfa, offset),
				 begin: fde->dw_fde_begin, vbegin: 0, end: fde->dw_fde_end, vend: 0, section);
      list_tail =&(*list_tail)->dw_loc_next;
      start_label = last_label = fde->dw_fde_second_begin;
    }

  /* Walk the FDE's CFI program, emitting a new list entry each time
     the CFA changes at an advance opcode.  */
  FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
    {
      switch (cfi->dw_cfi_opc)
	{
	case DW_CFA_set_loc:
	case DW_CFA_advance_loc1:
	case DW_CFA_advance_loc2:
	case DW_CFA_advance_loc4:
	  /* Close the range [start_label, last_label) with the CFA
	     that was in effect there, if it differs from the new one.  */
	  if (!cfa_equal_p (&last_cfa, &next_cfa))
	    {
	      *list_tail = new_loc_list (expr: build_cfa_loc (cfa: &last_cfa, offset),
					 begin: start_label, vbegin: 0, end: last_label, vend: 0, section);

	      list_tail = &(*list_tail)->dw_loc_next;
	      last_cfa = next_cfa;
	      start_label = last_label;
	    }
	  last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
	  break;

	case DW_CFA_advance_loc:
	  /* The encoding is complex enough that we should never emit this.  */
	  gcc_unreachable ();

	default:
	  /* Non-advance opcodes just update the tracked CFA.  */
	  lookup_cfa_1 (cfi, loc: &next_cfa, remember: &remember);
	  break;
	}
      /* When crossing into the second (cold/hot split) partition,
	 flush the pending range and close out the first partition.  */
      if (ix + 1 == fde->dw_fde_switch_cfi_index)
	{
	  if (!cfa_equal_p (&last_cfa, &next_cfa))
	    {
	      *list_tail = new_loc_list (expr: build_cfa_loc (cfa: &last_cfa, offset),
					 begin: start_label, vbegin: 0, end: last_label, vend: 0, section);

	      list_tail = &(*list_tail)->dw_loc_next;
	      last_cfa = next_cfa;
	      start_label = last_label;
	    }
	  *list_tail = new_loc_list (expr: build_cfa_loc (cfa: &last_cfa, offset),
				     begin: start_label, vbegin: 0, end: fde->dw_fde_end, vend: 0, section);
	  list_tail = &(*list_tail)->dw_loc_next;
	  start_label = last_label = fde->dw_fde_second_begin;
	}
    }

  /* Flush any pending range left after the last advance opcode.  */
  if (!cfa_equal_p (&last_cfa, &next_cfa))
    {
      *list_tail = new_loc_list (expr: build_cfa_loc (cfa: &last_cfa, offset),
				 begin: start_label, vbegin: 0, end: last_label, vend: 0, section);
      list_tail = &(*list_tail)->dw_loc_next;
      start_label = last_label;
    }

  /* Final range: from the last change to the end of the function (or
     of the second partition, if the function was split).  */
  *list_tail = new_loc_list (expr: build_cfa_loc (cfa: &next_cfa, offset),
			     begin: start_label, vbegin: 0,
			     end: fde->dw_fde_second_begin
			     ? fde->dw_fde_second_end : fde->dw_fde_end, vend: 0,
			     section);

  maybe_gen_llsym (list);

  return list;
}
21223 | |
21224 | /* Compute a displacement from the "steady-state frame pointer" to the |
21225 | frame base (often the same as the CFA), and store it in |
21226 | frame_pointer_fb_offset. OFFSET is added to the displacement |
21227 | before the latter is negated. */ |
21228 | |
static void
compute_frame_pointer_to_fb_displacement (poly_int64 offset)
{
  rtx reg, elim;

  /* Pick the register the target computes the CFA from — either the
     frame pointer or the argument pointer — and fold the corresponding
     target-defined adjustment into OFFSET.  */
#ifdef FRAME_POINTER_CFA_OFFSET
  reg = frame_pointer_rtx;
  offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  reg = arg_pointer_rtx;
  offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Eliminate REG to the hard register actually used at run time,
     going through LRA's elimination machinery when LRA is in use.  */
  elim = (ira_use_lra_p
	  ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
	  : eliminate_regs (reg, VOIDmode, NULL_RTX));
  /* Fold any constant displacement from the elimination into OFFSET,
     leaving ELIM as the bare base register (if the elimination worked).  */
  elim = strip_offset_and_add (x: elim, offset: &offset);

  frame_pointer_fb_offset = -offset;

  /* ??? AVR doesn't set up valid eliminations when there is no stack frame
     in which to eliminate.  This is because its stack pointer isn't
     directly accessible as a register within the ISA.  To work around
     this, assume that while we cannot provide a proper value for
     frame_pointer_fb_offset, we won't need one either.  We can use
     hard frame pointer in debug info even if frame pointer isn't used
     since hard frame pointer in debug info is encoded with DW_OP_fbreg
     which uses the DW_AT_frame_base attribute, not hard frame pointer
     directly.  */
  frame_pointer_fb_offset_valid
    = (elim == hard_frame_pointer_rtx || elim == stack_pointer_rtx);
}
21261 | |
21262 | /* Generate a DW_AT_name attribute given some string value to be included as |
21263 | the value of the attribute. */ |
21264 | |
21265 | void |
21266 | add_name_attribute (dw_die_ref die, const char *name_string) |
21267 | { |
21268 | if (name_string != NULL && *name_string != 0) |
21269 | { |
21270 | if (demangle_name_func) |
21271 | name_string = (*demangle_name_func) (name_string); |
21272 | |
21273 | add_AT_string (die, attr_kind: DW_AT_name, str: name_string); |
21274 | } |
21275 | } |
21276 | |
21277 | /* Generate a DW_AT_name attribute given some string value representing a |
21278 | file or filepath to be included as value of the attribute. */ |
21279 | static void |
21280 | add_filename_attribute (dw_die_ref die, const char *name_string) |
21281 | { |
21282 | if (name_string != NULL && *name_string != 0) |
21283 | add_filepath_AT_string (die, attr_kind: DW_AT_name, str: name_string); |
21284 | } |
21285 | |
21286 | /* Generate a DW_AT_description attribute given some string value to be included |
21287 | as the value of the attribute. */ |
21288 | |
21289 | static void |
21290 | add_desc_attribute (dw_die_ref die, const char *name_string) |
21291 | { |
21292 | if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict)) |
21293 | return; |
21294 | |
21295 | if (name_string == NULL || *name_string == 0) |
21296 | return; |
21297 | |
21298 | if (demangle_name_func) |
21299 | name_string = (*demangle_name_func) (name_string); |
21300 | |
21301 | add_AT_string (die, attr_kind: DW_AT_description, str: name_string); |
21302 | } |
21303 | |
21304 | /* Generate a DW_AT_description attribute given some decl to be included |
21305 | as the value of the attribute. */ |
21306 | |
21307 | static void |
21308 | add_desc_attribute (dw_die_ref die, tree decl) |
21309 | { |
21310 | tree decl_name; |
21311 | |
21312 | if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict)) |
21313 | return; |
21314 | |
21315 | if (decl == NULL_TREE || !DECL_P (decl)) |
21316 | return; |
21317 | decl_name = DECL_NAME (decl); |
21318 | |
21319 | if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL) |
21320 | { |
21321 | const char *name = dwarf2_name (decl, scope: 0); |
21322 | add_desc_attribute (die, name_string: name ? name : IDENTIFIER_POINTER (decl_name)); |
21323 | } |
21324 | else |
21325 | { |
21326 | char *desc = print_generic_expr_to_str (decl); |
21327 | add_desc_attribute (die, name_string: desc); |
21328 | free (ptr: desc); |
21329 | } |
21330 | } |
21331 | |
21332 | /* Retrieve the descriptive type of TYPE, if any, make sure it has a |
21333 | DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE |
21334 | of TYPE accordingly. |
21335 | |
21336 | ??? This is a temporary measure until after we're able to generate |
21337 | regular DWARF for the complex Ada type system. */ |
21338 | |
21339 | static void |
21340 | add_gnat_descriptive_type_attribute (dw_die_ref die, tree type, |
21341 | dw_die_ref context_die) |
21342 | { |
21343 | tree dtype; |
21344 | dw_die_ref dtype_die; |
21345 | |
21346 | if (!lang_hooks.types.descriptive_type) |
21347 | return; |
21348 | |
21349 | dtype = lang_hooks.types.descriptive_type (type); |
21350 | if (!dtype) |
21351 | return; |
21352 | |
21353 | dtype_die = lookup_type_die (type: dtype); |
21354 | if (!dtype_die) |
21355 | { |
21356 | gen_type_die (dtype, context_die); |
21357 | dtype_die = lookup_type_die (type: dtype); |
21358 | gcc_assert (dtype_die); |
21359 | } |
21360 | |
21361 | add_AT_die_ref (die, attr_kind: DW_AT_GNAT_descriptive_type, targ_die: dtype_die); |
21362 | } |
21363 | |
21364 | /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */ |
21365 | |
static const char *
comp_dir_string (void)
{
  const char *wd;
  char *wd_plus_sep = NULL;
  /* Computed once per compilation and kept forever after.  */
  static const char *cached_wd = NULL;

  if (cached_wd != NULL)
    return cached_wd;

  wd = get_src_pwd ();
  if (wd == NULL)
    return NULL;

  if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
    {
      /* The target wants a trailing directory separator; build a copy
	 with one appended.  */
      size_t wdlen = strlen (s: wd);
      wd_plus_sep = XNEWVEC (char, wdlen + 2);
      strcpy (dest: wd_plus_sep, src: wd);
      wd_plus_sep [wdlen] = DIR_SEPARATOR;
      wd_plus_sep [wdlen + 1] = 0;
      wd = wd_plus_sep;
    }

  cached_wd = remap_debug_filename (wd);

  /* remap_debug_filename can just pass through wd or return a new gc string.
     These two types can't be both stored in a GTY(())-tagged string, but since
     the cached value lives forever just copy it if needed.  */
  if (cached_wd != wd)
    {
      cached_wd = xstrdup (cached_wd);
      /* WD_PLUS_SEP did not become the cached value, so it can be freed;
	 when it DID (cached_wd == wd), it must live on as the cache.  */
      if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR && wd_plus_sep != NULL)
	free (ptr: wd_plus_sep);
    }

  return cached_wd;
}
21404 | |
21405 | /* Generate a DW_AT_comp_dir attribute for DIE. */ |
21406 | |
21407 | static void |
21408 | add_comp_dir_attribute (dw_die_ref die) |
21409 | { |
21410 | const char * wd = comp_dir_string (); |
21411 | if (wd != NULL) |
21412 | add_filepath_AT_string (die, attr_kind: DW_AT_comp_dir, str: wd); |
21413 | } |
21414 | |
21415 | /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a |
21416 | pointer computation, ...), output a representation for that bound according |
21417 | to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See |
21418 | loc_list_from_tree for the meaning of CONTEXT. */ |
21419 | |
static void
add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
		 int forms, struct loc_descr_context *context)
{
  dw_die_ref context_die, decl_die = NULL;
  dw_loc_list_ref list;
  bool strip_conversions = true;
  bool placeholder_seen = false;

  /* Strip (view-)conversions to reach the underlying expression; give up
     entirely on error marks and SAVE_EXPRs.  */
  while (strip_conversions)
    switch (TREE_CODE (value))
      {
      case ERROR_MARK:
      case SAVE_EXPR:
	return;

      CASE_CONVERT:
      case VIEW_CONVERT_EXPR:
	value = TREE_OPERAND (value, 0);
	break;

      default:
	strip_conversions = false;
	break;
      }

  /* If possible and permitted, output the attribute as a constant.  */
  if ((forms & dw_scalar_form_constant) != 0
      && TREE_CODE (value) == INTEGER_CST)
    {
      unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));

      /* If HOST_WIDE_INT is big enough then represent the bound as
	 a constant value.  We need to choose a form based on
	 whether the type is signed or unsigned.  We cannot just
	 call add_AT_unsigned if the value itself is positive
	 (add_AT_unsigned might add the unsigned value encoded as
	 DW_FORM_data[1248]).  Some DWARF consumers will lookup the
	 bounds type and then sign extend any unsigned values found
	 for signed types.  This is needed only for
	 DW_AT_{lower,upper}_bound, since for most other attributes,
	 consumers will treat DW_FORM_data[1248] as unsigned values,
	 regardless of the underlying type.  */
      if (prec <= HOST_BITS_PER_WIDE_INT
	  || tree_fits_uhwi_p (value))
	{
	  if (TYPE_UNSIGNED (TREE_TYPE (value)))
	    add_AT_unsigned (die, attr_kind: attr, TREE_INT_CST_LOW (value));
	  else
	    add_AT_int (die, attr_kind: attr, TREE_INT_CST_LOW (value));
	}
      else if (dwarf_version >= 5
	       && TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (value))) == 128)
	/* Otherwise represent the bound as an unsigned value with
	   the precision of its type.  The precision and signedness
	   of the type will be necessary to re-interpret it
	   unambiguously.  */
	add_AT_wide (die, attr_kind: attr, w: wi::to_wide (t: value));
      else
	{
	  /* Wide constant, pre-DWARF-5: fall back to an exprloc built
	     from the constant's RTL form.  */
	  rtx v = immed_wide_int_const (wi::to_wide (t: value),
					TYPE_MODE (TREE_TYPE (value)));
	  dw_loc_descr_ref loc
	    = loc_descriptor (rtl: v, TYPE_MODE (TREE_TYPE (value)),
			      initialized: VAR_INIT_STATUS_INITIALIZED);
	  if (loc)
	    add_AT_loc (die, attr_kind: attr, loc);
	}
      return;
    }

  /* Otherwise, if it's possible and permitted too, output a reference to
     another DIE.  */
  if ((forms & dw_scalar_form_reference) != 0)
    {
      tree decl = NULL_TREE;

      /* Some type attributes reference an outer type.  For instance, the upper
	 bound of an array may reference an embedding record (this happens in
	 Ada).  */
      if (TREE_CODE (value) == COMPONENT_REF
	  && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
	  && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
	decl = TREE_OPERAND (value, 1);

      else if (VAR_P (value)
	       || TREE_CODE (value) == PARM_DECL
	       || TREE_CODE (value) == RESULT_DECL)
	decl = value;

      if (decl != NULL_TREE)
	{
	  decl_die = lookup_decl_die (decl);

	  /* ??? Can this happen, or should the variable have been bound
	     first?  Probably it can, since I imagine that we try to create
	     the types of parameters in the order in which they exist in
	     the list, and won't have created a forward reference to a
	     later parameter.  */
	  if (decl_die != NULL)
	    {
	      /* Only reference DIEs that already carry a location or a
		 value — a bare declaration DIE would be useless to
		 consumers.  */
	      if (get_AT (die: decl_die, attr_kind: DW_AT_location)
		  || get_AT (die: decl_die, attr_kind: DW_AT_data_member_location)
		  || get_AT (die: decl_die, attr_kind: DW_AT_data_bit_offset)
		  || get_AT (die: decl_die, attr_kind: DW_AT_const_value))
		{
		  add_AT_die_ref (die, attr_kind: attr, targ_die: decl_die);
		  return;
		}
	    }
	}
    }

  /* Last chance: try to create a stack operation procedure to evaluate the
     value.  Do nothing if even that is not possible or permitted.  */
  if ((forms & dw_scalar_form_exprloc) == 0)
    return;

  list = loc_list_from_tree (loc: value, want_address: 2, context);
  if (context && context->placeholder_arg)
    {
      /* Record (and reset) whether building the list consumed the
	 placeholder argument; that influences the choices below.  */
      placeholder_seen = context->placeholder_seen;
      context->placeholder_seen = false;
    }
  if (list == NULL || single_element_loc_list_p (list))
    {
      /* If this attribute is not a reference nor constant, it is
	 a DWARF expression rather than location description.  For that
	 loc_list_from_tree (value, 0, &context) is needed.  */
      dw_loc_list_ref list2 = loc_list_from_tree (loc: value, want_address: 0, context);
      if (list2 && single_element_loc_list_p (list: list2))
	{
	  if (placeholder_seen)
	    {
	      /* The expression PICKs the placeholder off a caller-provided
		 stack frame of one argument; validate that usage.  */
	      struct dwarf_procedure_info dpi;
	      dpi.fndecl = NULL_TREE;
	      dpi.args_count = 1;
	      if (!resolve_args_picking (loc: list2->expr, initial_frame_offset: 1, dpi: &dpi))
		return;
	    }
	  add_AT_loc (die, attr_kind: attr, loc: list2->expr);
	  return;
	}
    }

  /* If that failed to give a single element location list, fall back to
     outputting this as a reference... still if permitted.  */
  if (list == NULL
      || (forms & dw_scalar_form_reference) == 0
      || placeholder_seen)
    return;

  if (!decl_die)
    {
      /* Wrap the location list in an artificial variable DIE that the
	 attribute can reference, parented in the current function or the
	 compilation unit.  */
      if (current_function_decl == 0)
	context_die = comp_unit_die ();
      else
	context_die = lookup_decl_die (decl: current_function_decl);

      decl_die = new_die (tag_value: DW_TAG_variable, parent_die: context_die, t: value);
      add_AT_flag (die: decl_die, attr_kind: DW_AT_artificial, flag: 1);
      add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
			  context_die);
    }

  add_AT_location_description (die: decl_die, attr_kind: DW_AT_location, descr: list);
  add_AT_die_ref (die, attr_kind: attr, targ_die: decl_die);
}
21588 | |
21589 | /* Return the default for DW_AT_lower_bound, or -1 if there is not any |
21590 | default. */ |
21591 | |
21592 | static int |
21593 | lower_bound_default (void) |
21594 | { |
21595 | switch (get_AT_unsigned (die: comp_unit_die (), attr_kind: DW_AT_language)) |
21596 | { |
21597 | case DW_LANG_C: |
21598 | case DW_LANG_C89: |
21599 | case DW_LANG_C99: |
21600 | case DW_LANG_C11: |
21601 | case DW_LANG_C_plus_plus: |
21602 | case DW_LANG_C_plus_plus_11: |
21603 | case DW_LANG_C_plus_plus_14: |
21604 | case DW_LANG_ObjC: |
21605 | case DW_LANG_ObjC_plus_plus: |
21606 | return 0; |
21607 | case DW_LANG_Fortran77: |
21608 | case DW_LANG_Fortran90: |
21609 | case DW_LANG_Fortran95: |
21610 | case DW_LANG_Fortran03: |
21611 | case DW_LANG_Fortran08: |
21612 | return 1; |
21613 | case DW_LANG_UPC: |
21614 | case DW_LANG_D: |
21615 | case DW_LANG_Python: |
21616 | return dwarf_version >= 4 ? 0 : -1; |
21617 | case DW_LANG_Ada95: |
21618 | case DW_LANG_Ada83: |
21619 | case DW_LANG_Cobol74: |
21620 | case DW_LANG_Cobol85: |
21621 | case DW_LANG_Modula2: |
21622 | case DW_LANG_PLI: |
21623 | return dwarf_version >= 4 ? 1 : -1; |
21624 | default: |
21625 | return -1; |
21626 | } |
21627 | } |
21628 | |
21629 | /* Given a tree node describing an array bound (either lower or upper) output |
21630 | a representation for that bound. */ |
21631 | |
static void
add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
		tree bound, struct loc_descr_context *context)
{
  int dflt;

  /* Loop only to peel conversions; every other case returns.  */
  while (1)
    switch (TREE_CODE (bound))
      {
      /* Strip all conversions.  */
      CASE_CONVERT:
      case VIEW_CONVERT_EXPR:
	bound = TREE_OPERAND (bound, 0);
	break;

      /* All fixed-bounds are represented by INTEGER_CST nodes.  Lower bounds
	 are even omitted when they are the default.  */
      case INTEGER_CST:
	/* If the value for this bound is the default one, we can even omit the
	   attribute.  */
	if (bound_attr == DW_AT_lower_bound
	    && tree_fits_shwi_p (bound)
	    && (dflt = lower_bound_default ()) != -1
	    && tree_to_shwi (bound) == dflt)
	  return;

	/* FALLTHRU */

      default:
	/* Let GNAT encodings do the magic for self-referential bounds.  */
	if (is_ada ()
	    && gnat_encodings == DWARF_GNAT_ENCODINGS_ALL
	    && contains_placeholder_p (bound))
	  return;

	/* Otherwise allow all three scalar representations and let
	   add_scalar_info pick the best one it can produce.  */
	add_scalar_info (die: subrange_die, attr: bound_attr, value: bound,
			 forms: dw_scalar_form_constant
			 | dw_scalar_form_exprloc
			 | dw_scalar_form_reference,
			 context);
	return;
      }
}
21675 | |
21676 | /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing |
21677 | possibly nested array subscripts in a flat sequence if COLLAPSE_P is true. |
21678 | |
21679 | This function reuses previously set type and bound information if |
21680 | available. */ |
21681 | |
static void
add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
{
  dw_die_ref child = type_die->die_child;
  struct array_descr_info info;
  int dimension_number;

  /* Let the language supply full bound/type information for the array
     (e.g. Fortran descriptors); otherwise no dimension info is used.  */
  if (lang_hooks.types.get_array_descr_info)
    {
      memset (s: &info, c: 0, n: sizeof (info));
      if (lang_hooks.types.get_array_descr_info (type, &info))
	/* Fortran sometimes emits array types with no dimension.  */
	gcc_assert (info.ndimensions >= 0
		    && info.ndimensions
		       <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN);
    }
  else
    info.ndimensions = 0;

  /* Walk nested ARRAY_TYPEs; with COLLAPSE_P, every dimension becomes a
     DW_TAG_subrange_type child of this one array DIE.  */
  for (dimension_number = 0;
       TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
       type = TREE_TYPE (type), dimension_number++)
    {
      tree domain = TYPE_DOMAIN (type);

      /* Fortran character data is handled elsewhere; stop collapsing at
	 a nested string type.  */
      if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
	break;

      /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
	 and (in GNU C only) variable bounds.  Handle all three forms
	 here.  */

      /* Find and reuse a previously generated DW_TAG_subrange_type if
	 available.

	 For multi-dimensional arrays, as we iterate through the
	 various dimensions in the enclosing for loop above, we also
	 iterate through the DIE children and pick at each
	 DW_TAG_subrange_type previously generated (if available).
	 Each child DW_TAG_subrange_type DIE describes the range of
	 the current dimension.  At this point we should have as many
	 DW_TAG_subrange_type's as we have dimensions in the
	 array.  */
      dw_die_ref subrange_die = NULL;
      if (child)
	while (1)
	  {
	    /* The child list is circular; advance first, then test for
	       wrap-around against the head.  */
	    child = child->die_sib;
	    if (child->die_tag == DW_TAG_subrange_type)
	      subrange_die = child;
	    if (child == type_die->die_child)
	      {
		/* If we wrapped around, stop looking next time.  */
		child = NULL;
		break;
	      }
	    if (child->die_tag == DW_TAG_subrange_type)
	      break;
	  }
      if (!subrange_die)
	subrange_die = new_die (tag_value: DW_TAG_subrange_type, parent_die: type_die, NULL);

      if (domain)
	{
	  /* We have an array type with specified bounds.  */
	  tree lower = TYPE_MIN_VALUE (domain);
	  tree upper = TYPE_MAX_VALUE (domain);
	  tree index_type = TREE_TYPE (domain);

	  /* Language-provided bounds and bound types take precedence over
	     what the tree's domain says.  */
	  if (dimension_number <= info.ndimensions - 1)
	    {
	      lower = info.dimen[dimension_number].lower_bound;
	      upper = info.dimen[dimension_number].upper_bound;
	      index_type = info.dimen[dimension_number].bounds_type;
	    }

	  /* Define the index type.  */
	  if (index_type && !get_AT (die: subrange_die, attr_kind: DW_AT_type))
	    add_type_attribute (subrange_die, index_type, TYPE_UNQUALIFIED,
				false, type_die);

	  /* ??? If upper is NULL, the array has unspecified length,
	     but it does have a lower bound.  This happens with Fortran
	       dimension arr(N:*)
	     Since the debugger is definitely going to need to know N
	     to produce useful results, go ahead and output the lower
	     bound solo, and hope the debugger can cope.  */

	  if (lower && !get_AT (die: subrange_die, attr_kind: DW_AT_lower_bound))
	    add_bound_info (subrange_die, bound_attr: DW_AT_lower_bound, bound: lower, NULL);

	  if (!get_AT (die: subrange_die, attr_kind: DW_AT_upper_bound)
	      && !get_AT (die: subrange_die, attr_kind: DW_AT_count))
	    {
	      if (upper)
		add_bound_info (subrange_die, bound_attr: DW_AT_upper_bound, bound: upper, NULL);
	      else if ((is_c () || is_cxx ()) && COMPLETE_TYPE_P (type))
		/* Zero-length array.  */
		add_bound_info (subrange_die, bound_attr: DW_AT_count,
				bound: build_int_cst (TREE_TYPE (lower), 0), NULL);
	    }
	}

      /* Otherwise we have an array type with an unspecified length.  The
	 DWARF-2 spec does not say how to handle this; let's just leave out the
	 bounds.  */
    }
}
21790 | |
21791 | /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */ |
21792 | |
static void
add_byte_size_attribute (dw_die_ref die, tree tree_node)
{
  dw_die_ref decl_die;
  HOST_WIDE_INT size;

  switch (TREE_CODE (tree_node))
    {
    case ERROR_MARK:
      /* Error recovery: act as if the size were zero.  */
      size = 0;
      break;
    case ENUMERAL_TYPE:
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* When the size lives in a variable that already has a DIE,
	 reference that DIE rather than emitting a constant.  */
      if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
	  && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
	{
	  add_AT_die_ref (die, attr_kind: DW_AT_byte_size, targ_die: decl_die);
	  return;
	}
      size = int_size_in_bytes (tree_node);
      break;
    case FIELD_DECL:
      /* For a data member of a struct or union, the DW_AT_byte_size is
	 generally given as the number of bytes normally allocated for an
	 object of the *declared* type of the member itself.  This is true
	 even for bit-fields.  */
      size = int_size_in_bytes (field_type (decl: tree_node));
      break;
    default:
      gcc_unreachable ();
    }

  /* Note that `size' might be -1 when we get to this point.  If it is, that
     indicates that the byte size of the entity in question is variable.  */
  if (size >= 0)
    add_AT_unsigned (die, attr_kind: DW_AT_byte_size, unsigned_val: size);

  /* Support for dynamically-sized objects was introduced in DWARF3.  */
  else if (TYPE_P (tree_node)
	   && (dwarf_version >= 3 || !dwarf_strict)
	   && gnat_encodings != DWARF_GNAT_ENCODINGS_ALL)
    {
      /* Emit the dynamic size as a scalar attribute evaluated in the
	 context of the type itself.  */
      struct loc_descr_context ctx = {
	.context_type: const_cast<tree> (tree_node),	/* context_type */
	NULL_TREE,	/* base_decl */
	NULL,	/* dpi */
	.placeholder_arg: false,	/* placeholder_arg */
	.placeholder_seen: false,	/* placeholder_seen */
	.strict_signedness: false	/* strict_signedness */
      };

      tree tree_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (tree_node));
      add_scalar_info (die, attr: DW_AT_byte_size, value: tree_size,
		       forms: dw_scalar_form_constant
		       | dw_scalar_form_exprloc
		       | dw_scalar_form_reference,
		       context: &ctx);
    }
}
21854 | |
21855 | /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default |
21856 | alignment. */ |
21857 | |
21858 | static void |
21859 | add_alignment_attribute (dw_die_ref die, tree tree_node) |
21860 | { |
21861 | if (dwarf_version < 5 && dwarf_strict) |
21862 | return; |
21863 | |
21864 | unsigned align; |
21865 | |
21866 | if (DECL_P (tree_node)) |
21867 | { |
21868 | if (!DECL_USER_ALIGN (tree_node)) |
21869 | return; |
21870 | |
21871 | align = DECL_ALIGN_UNIT (tree_node); |
21872 | } |
21873 | else if (TYPE_P (tree_node)) |
21874 | { |
21875 | if (!TYPE_USER_ALIGN (tree_node)) |
21876 | return; |
21877 | |
21878 | align = TYPE_ALIGN_UNIT (tree_node); |
21879 | } |
21880 | else |
21881 | gcc_unreachable (); |
21882 | |
21883 | add_AT_unsigned (die, attr_kind: DW_AT_alignment, unsigned_val: align); |
21884 | } |
21885 | |
21886 | /* For a FIELD_DECL node which represents a bit-field, output an attribute |
21887 | which specifies the distance in bits from the highest order bit of the |
21888 | "containing object" for the bit-field to the highest order bit of the |
21889 | bit-field itself. |
21890 | |
21891 | For any given bit-field, the "containing object" is a hypothetical object |
21892 | (of some integral or enum type) within which the given bit-field lives. The |
21893 | type of this hypothetical "containing object" is always the same as the |
21894 | declared type of the individual bit-field itself. The determination of the |
21895 | exact location of the "containing object" for a bit-field is rather |
21896 | complicated. It's handled by the `field_byte_offset' function (above). |
21897 | |
21898 | Note that it is the size (in bytes) of the hypothetical "containing object" |
21899 | which will be given in the DW_AT_byte_size attribute for this bit-field. |
21900 | (See `byte_size_attribute' above). */ |
21901 | |
static inline void
add_bit_offset_attribute (dw_die_ref die, tree decl)
{
  HOST_WIDE_INT object_offset_in_bytes;
  tree original_type = DECL_BIT_FIELD_TYPE (decl);
  HOST_WIDE_INT bitpos_int;
  HOST_WIDE_INT highest_order_object_bit_offset;
  HOST_WIDE_INT highest_order_field_bit_offset;
  HOST_WIDE_INT bit_offset;

  /* The containing object is within the DECL_CONTEXT.  */
  struct vlr_context ctx = { DECL_CONTEXT (decl), NULL_TREE };

  field_byte_offset (decl, ctx: &ctx, cst_offset: &object_offset_in_bytes);

  /* Must be a field and a bit field.  */
  gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);

  /* We can't yet handle bit-fields whose offsets are variable, so if we
     encounter such things, just return without generating any attribute
     whatsoever.  Likewise for variable or too large size.  */
  if (! tree_fits_shwi_p (bit_position (decl))
      || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
    return;

  bitpos_int = int_bit_position (field: decl);

  /* Note that the bit offset is always the distance (in bits) from the
     highest-order bit of the "containing object" to the highest-order bit of
     the bit-field itself.  Since the "high-order end" of any object or field
     is different on big-endian and little-endian machines, the computation
     below must take account of these differences.  */
  highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
  highest_order_field_bit_offset = bitpos_int;

  if (! BYTES_BIG_ENDIAN)
    {
      /* Little-endian: turn the low-order positions into distances from
	 the high-order end by adding the respective widths.  */
      highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
      highest_order_object_bit_offset +=
	simple_type_size_in_bits (type: original_type);
    }

  bit_offset
    = (! BYTES_BIG_ENDIAN
       ? highest_order_object_bit_offset - highest_order_field_bit_offset
       : highest_order_field_bit_offset - highest_order_object_bit_offset);

  /* A negative offset is possible; use the signed form then so consumers
     can reconstruct the value correctly.  */
  if (bit_offset < 0)
    add_AT_int (die, attr_kind: DW_AT_bit_offset, int_val: bit_offset);
  else
    add_AT_unsigned (die, attr_kind: DW_AT_bit_offset, unsigned_val: (unsigned HOST_WIDE_INT) bit_offset);
}
21954 | |
21955 | /* For a FIELD_DECL node which represents a bit field, output an attribute |
21956 | which specifies the length in bits of the given field. */ |
21957 | |
21958 | static inline void |
21959 | add_bit_size_attribute (dw_die_ref die, tree decl) |
21960 | { |
21961 | /* Must be a field and a bit field. */ |
21962 | gcc_assert (TREE_CODE (decl) == FIELD_DECL |
21963 | && DECL_BIT_FIELD_TYPE (decl)); |
21964 | |
21965 | if (tree_fits_uhwi_p (DECL_SIZE (decl))) |
21966 | add_AT_unsigned (die, attr_kind: DW_AT_bit_size, unsigned_val: tree_to_uhwi (DECL_SIZE (decl))); |
21967 | } |
21968 | |
21969 | /* If the compiled language is ANSI C, then add a 'prototyped' |
21970 | attribute, if arg types are given for the parameters of a function. */ |
21971 | |
21972 | static inline void |
21973 | add_prototyped_attribute (dw_die_ref die, tree func_type) |
21974 | { |
21975 | switch (get_AT_unsigned (die: comp_unit_die (), attr_kind: DW_AT_language)) |
21976 | { |
21977 | case DW_LANG_C: |
21978 | case DW_LANG_C89: |
21979 | case DW_LANG_C99: |
21980 | case DW_LANG_C11: |
21981 | case DW_LANG_ObjC: |
21982 | if (prototype_p (func_type)) |
21983 | add_AT_flag (die, attr_kind: DW_AT_prototyped, flag: 1); |
21984 | break; |
21985 | default: |
21986 | break; |
21987 | } |
21988 | } |
21989 | |
21990 | /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found |
21991 | by looking in the type declaration, the object declaration equate table or |
21992 | the block mapping. */ |
21993 | |
static inline void
add_abstract_origin_attribute (dw_die_ref die, tree origin)
{
  dw_die_ref origin_die = NULL;

  /* For late LTO debug output we want to refer directly to the abstract
     DIE in the early debug rather to the possibly existing concrete
     instance and avoid creating that just for this purpose.  */
  sym_off_pair *desc;
  if (in_lto_p
      && external_die_map
      && (desc = external_die_map->get (k: origin)))
    {
      add_AT_external_die_ref (die, attr_kind: DW_AT_abstract_origin,
			       symbol: desc->sym, offset: desc->off);
      return;
    }

  /* Resolve ORIGIN through the lookup table matching its tree class.  */
  if (DECL_P (origin))
    origin_die = lookup_decl_die (decl: origin);
  else if (TYPE_P (origin))
    origin_die = lookup_type_die (type: origin);
  else if (TREE_CODE (origin) == BLOCK)
    origin_die = lookup_block_die (block: origin);

  /* XXX: Functions that are never lowered don't always have correct block
     trees (in the case of java, they simply have no block tree, in some other
     languages).  For these functions, there is nothing we can really do to
     output correct debug info for inlined functions in all cases.  Rather
     than die, we'll just produce deficient debug info now, in that we will
     have variables without a proper abstract origin.  In the future, when all
     functions are lowered, we should re-add a gcc_assert (origin_die)
     here.  */

  if (origin_die)
    {
      dw_attr_node *a;
      /* Like above, if we already created a concrete instance DIE
	 do not use that for the abstract origin but the early DIE
	 if present.  */
      if (in_lto_p
	  && (a = get_AT (die: origin_die, attr_kind: DW_AT_abstract_origin)))
	origin_die = AT_ref (a);
      add_AT_die_ref (die, attr_kind: DW_AT_abstract_origin, targ_die: origin_die);
    }
}
22040 | |
22041 | /* We do not currently support the pure_virtual attribute. */ |
22042 | |
static inline void
add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
{
  /* Only virtual member functions have a vtable index.  */
  if (DECL_VINDEX (func_decl))
    {
      add_AT_unsigned (die, attr_kind: DW_AT_virtuality, unsigned_val: DW_VIRTUALITY_virtual);

      /* Describe the vtable slot as a location expression pushing the
	 constant index, when the index fits a signed HWI.  */
      if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
	add_AT_loc (die, attr_kind: DW_AT_vtable_elem_location,
		    loc: new_loc_descr (op: DW_OP_constu,
				   oprnd1: tree_to_shwi (DECL_VINDEX (func_decl)),
				   oprnd2: 0));

      /* GNU extension: Record what type this method came from originally.  */
      if (debug_info_level > DINFO_LEVEL_TERSE
	  && DECL_CONTEXT (func_decl))
	add_AT_die_ref (die, attr_kind: DW_AT_containing_type,
			targ_die: lookup_type_die (DECL_CONTEXT (func_decl)));
    }
}
22063 | |
22064 | /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the |
22065 | given decl. This used to be a vendor extension until after DWARF 4 |
22066 | standardized it. */ |
22067 | |
22068 | static void |
22069 | add_linkage_attr (dw_die_ref die, tree decl) |
22070 | { |
22071 | const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)); |
22072 | |
22073 | /* Mimic what assemble_name_raw does with a leading '*'. */ |
22074 | if (name[0] == '*') |
22075 | name = &name[1]; |
22076 | |
22077 | if (dwarf_version >= 4) |
22078 | add_AT_string (die, attr_kind: DW_AT_linkage_name, str: name); |
22079 | else |
22080 | add_AT_string (die, attr_kind: DW_AT_MIPS_linkage_name, str: name); |
22081 | } |
22082 | |
22083 | /* Add source coordinate attributes for the given decl. */ |
22084 | |
22085 | static void |
22086 | add_src_coords_attributes (dw_die_ref die, tree decl) |
22087 | { |
22088 | expanded_location s; |
22089 | |
22090 | if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION) |
22091 | return; |
22092 | s = expand_location (DECL_SOURCE_LOCATION (decl)); |
22093 | add_AT_file (die, attr_kind: DW_AT_decl_file, fd: lookup_filename (s.file)); |
22094 | add_AT_unsigned (die, attr_kind: DW_AT_decl_line, unsigned_val: s.line); |
22095 | if (debug_column_info && s.column) |
22096 | add_AT_unsigned (die, attr_kind: DW_AT_decl_column, unsigned_val: s.column); |
22097 | } |
22098 | |
22099 | /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */ |
22100 | |
static void
add_linkage_name_raw (dw_die_ref die, tree decl)
{
  /* Defer until we have an assembler name set.  */
  if (!DECL_ASSEMBLER_NAME_SET_P (decl))
    {
      limbo_die_node *asm_name;

      /* Push (die, decl) onto the deferred_asm_name list; the attribute
	 is added later, once the assembler name has been computed.  */
      asm_name = ggc_cleared_alloc<limbo_die_node> ();
      asm_name->die = die;
      asm_name->created_for = decl;
      asm_name->next = deferred_asm_name;
      deferred_asm_name = asm_name;
    }
  /* Only emit the attribute when the mangled name actually differs
     from the source-level name.  */
  else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
    add_linkage_attr (die, decl);
}
22118 | |
22119 | /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */ |
22120 | |
22121 | static void |
22122 | add_linkage_name (dw_die_ref die, tree decl) |
22123 | { |
22124 | if (debug_info_level > DINFO_LEVEL_NONE |
22125 | && VAR_OR_FUNCTION_DECL_P (decl) |
22126 | && TREE_PUBLIC (decl) |
22127 | && !(VAR_P (decl) && DECL_REGISTER (decl)) |
22128 | && die->die_tag != DW_TAG_member) |
22129 | add_linkage_name_raw (die, decl); |
22130 | } |
22131 | |
22132 | /* Add a DW_AT_name attribute and source coordinate attribute for the |
22133 | given decl, but only if it actually has a name. */ |
22134 | |
static void
add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
				    bool no_linkage_name)
{
  tree decl_name;

  decl_name = DECL_NAME (decl);
  if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
    {
      /* Prefer the language's printable name; fall back to a
	 description when none is available.  */
      const char *name = dwarf2_name (decl, scope: 0);
      if (name)
	add_name_attribute (die, name_string: name);
      else
	add_desc_attribute (die, decl);

      /* Compiler-generated decls have no meaningful source location.  */
      if (! DECL_ARTIFICIAL (decl))
	add_src_coords_attributes (die, decl);

      if (!no_linkage_name)
	add_linkage_name (die, decl);
    }
  else
    add_desc_attribute (die, decl);

#ifdef VMS_DEBUGGING_INFO
  /* Get the function's name, as described by its RTL.  This may be different
     from the DECL_NAME name used in the source file.  */
  if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
    {
      add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
		   XEXP (DECL_RTL (decl), 0), false);
      vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
    }
#endif /* VMS_DEBUGGING_INFO */
}
22170 | |
22171 | /* Add VALUE as a DW_AT_discr_value attribute to DIE. */ |
22172 | |
static void
add_discr_value (dw_die_ref die, dw_discr_value *value)
{
  dw_attr_node attr;

  attr.dw_attr = DW_AT_discr_value;
  attr.dw_attr_val.val_class = dw_val_class_discr_value;
  attr.dw_attr_val.val_entry = NULL;
  /* POS selects which member of the value union is live:
     nonzero means the unsigned member, zero the signed one.  */
  attr.dw_attr_val.v.val_discr_value.pos = value->pos;
  if (value->pos)
    attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
  else
    attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
  add_dwarf_attr (die, attr: &attr);
}
22188 | |
22189 | /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */ |
22190 | |
22191 | static void |
22192 | add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list) |
22193 | { |
22194 | dw_attr_node attr; |
22195 | |
22196 | attr.dw_attr = DW_AT_discr_list; |
22197 | attr.dw_attr_val.val_class = dw_val_class_discr_list; |
22198 | attr.dw_attr_val.val_entry = NULL; |
22199 | attr.dw_attr_val.v.val_discr_list = discr_list; |
22200 | add_dwarf_attr (die, attr: &attr); |
22201 | } |
22202 | |
22203 | static inline dw_discr_list_ref |
22204 | AT_discr_list (dw_attr_node *attr) |
22205 | { |
22206 | return attr->dw_attr_val.v.val_discr_list; |
22207 | } |
22208 | |
22209 | #ifdef VMS_DEBUGGING_INFO |
22210 | /* Output the debug main pointer die for VMS */ |
22211 | |
void
dwarf2out_vms_debug_main_pointer (void)
{
  char label[MAX_ARTIFICIAL_LABEL_BYTES];
  dw_die_ref die;

  /* Allocate the VMS debug main subprogram die.  */
  die = new_die_raw (DW_TAG_subprogram);
  add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
  /* Its entry point is the current function's prologue-end label.  */
  ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
			       current_function_funcdef_no);
  add_AT_lbl_id (die, DW_AT_entry_pc, label);

  /* Make it the first child of comp_unit_die ().  */
  die->die_parent = comp_unit_die ();
  if (comp_unit_die ()->die_child)
    {
      /* die_child points at the LAST child, whose die_sib closes the
	 circular sibling list back to the first child; splice DIE in
	 right after the last child so it becomes the first one.  */
      die->die_sib = comp_unit_die ()->die_child->die_sib;
      comp_unit_die ()->die_child->die_sib = die;
    }
  else
    {
      /* Empty CU: DIE becomes a one-element circular list.  */
      die->die_sib = die;
      comp_unit_die ()->die_child = die;
    }
}
22238 | #endif /* VMS_DEBUGGING_INFO */ |
22239 | |
22240 | /* walk_tree helper function for uses_local_type, below. */ |
22241 | |
22242 | static tree |
22243 | uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) |
22244 | { |
22245 | if (!TYPE_P (*tp)) |
22246 | *walk_subtrees = 0; |
22247 | else |
22248 | { |
22249 | tree name = TYPE_NAME (*tp); |
22250 | if (name && DECL_P (name) && decl_function_context (name)) |
22251 | return *tp; |
22252 | } |
22253 | return NULL_TREE; |
22254 | } |
22255 | |
22256 | /* If TYPE involves a function-local type (including a local typedef to a |
22257 | non-local type), returns that type; otherwise returns NULL_TREE. */ |
22258 | |
22259 | static tree |
22260 | uses_local_type (tree type) |
22261 | { |
22262 | tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL); |
22263 | return used; |
22264 | } |
22265 | |
22266 | /* Return the DIE for the scope that immediately contains this type. |
22267 | Non-named types that do not involve a function-local type get global |
22268 | scope. Named types nested in namespaces or other types get their |
22269 | containing scope. All other types (i.e. function-local named types) get |
22270 | the current active scope. */ |
22271 | |
static dw_die_ref
scope_die_for (tree t, dw_die_ref context_die)
{
  dw_die_ref scope_die = NULL;
  tree containing_scope;

  /* Non-types always go in the current scope.  */
  gcc_assert (TYPE_P (t));

  /* Use the scope of the typedef, rather than the scope of the type
     it refers to.  */
  if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
    containing_scope = DECL_CONTEXT (TYPE_NAME (t));
  else
    containing_scope = TYPE_CONTEXT (t);

  /* Use the containing namespace if there is one.  */
  if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
    {
      if (context_die == lookup_decl_die (decl: containing_scope))
	/* OK */;
      else if (debug_info_level > DINFO_LEVEL_TERSE)
	context_die = get_context_die (containing_scope);
      else
	/* At -g1 we don't emit namespaces; drop the scope entirely so
	   the file-scope path below is taken.  */
	containing_scope = NULL_TREE;
    }

  /* Ignore function type "scopes" from the C frontend.  They mean that
     a tagged type is local to a parmlist of a function declarator, but
     that isn't useful to DWARF.  */
  if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
    containing_scope = NULL_TREE;

  if (SCOPE_FILE_SCOPE_P (containing_scope))
    {
      /* If T uses a local type keep it local as well, to avoid references
	 to function-local DIEs from outside the function.  */
      if (current_function_decl && uses_local_type (type: t))
	scope_die = context_die;
      else
	scope_die = comp_unit_die ();
    }
  else if (TYPE_P (containing_scope))
    {
      /* For types, we can just look up the appropriate DIE.  */
      if (debug_info_level > DINFO_LEVEL_TERSE)
	scope_die = get_context_die (containing_scope);
      else
	{
	  /* At -g1, only reuse an already-emitted DIE; never force
	     creation of the containing type's DIE.  */
	  scope_die = lookup_type_die_strip_naming_typedef (type: containing_scope);
	  if (scope_die == NULL)
	    scope_die = comp_unit_die ();
	}
    }
  else
    /* Function-local named types get the current active scope.  */
    scope_die = context_die;

  return scope_die;
}
22331 | |
22332 | /* Returns true if CONTEXT_DIE is internal to a function. */ |
22333 | |
22334 | static inline bool |
22335 | local_scope_p (dw_die_ref context_die) |
22336 | { |
22337 | for (; context_die; context_die = context_die->die_parent) |
22338 | if (context_die->die_tag == DW_TAG_inlined_subroutine |
22339 | || context_die->die_tag == DW_TAG_subprogram) |
22340 | return true; |
22341 | |
22342 | return false; |
22343 | } |
22344 | |
22345 | /* Returns true if CONTEXT_DIE is a class. */ |
22346 | |
22347 | static inline bool |
22348 | class_scope_p (dw_die_ref context_die) |
22349 | { |
22350 | return (context_die |
22351 | && (context_die->die_tag == DW_TAG_structure_type |
22352 | || context_die->die_tag == DW_TAG_class_type |
22353 | || context_die->die_tag == DW_TAG_interface_type |
22354 | || context_die->die_tag == DW_TAG_union_type)); |
22355 | } |
22356 | |
22357 | /* Returns true if CONTEXT_DIE is a class or namespace, for deciding |
22358 | whether or not to treat a DIE in this context as a declaration. */ |
22359 | |
22360 | static inline bool |
22361 | class_or_namespace_scope_p (dw_die_ref context_die) |
22362 | { |
22363 | return (class_scope_p (context_die) |
22364 | || (context_die && context_die->die_tag == DW_TAG_namespace)); |
22365 | } |
22366 | |
22367 | /* Many forms of DIEs require a "type description" attribute. This |
22368 | routine locates the proper "type descriptor" die for the type given |
22369 | by 'type' plus any additional qualifiers given by 'cv_quals', and |
22370 | adds a DW_AT_type attribute below the given die. */ |
22371 | |
static void
add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
		    bool reverse, dw_die_ref context_die)
{
  enum tree_code code = TREE_CODE (type);
  dw_die_ref type_die = NULL;

  /* No type attributes at all in terse mode.  */
  if (debug_info_level <= DINFO_LEVEL_TERSE)
    return;

  /* ??? If this type is an unnamed subrange type of an integral, floating-point
     or fixed-point type, use the inner type.  This is because we have no
     support for unnamed types in base_type_die.  This can happen if this is
     an Ada subrange type.  Correct solution is emit a subrange type die.  */
  if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
      && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
    type = TREE_TYPE (type), code = TREE_CODE (type);

  if (code == ERROR_MARK
      /* Handle a special case.  For functions whose return type is void, we
	 generate *no* type attribute.  (Note that no object may have type
	 `void', so this only applies to function return types).  */
      || code == VOID_TYPE)
    return;

  /* Merge the caller-requested qualifiers with TYPE's own, and fetch
     (or build) the DIE for the qualified type.  */
  type_die = modified_type_die (type,
				cv_quals: cv_quals | TYPE_QUALS (type),
				reverse,
				context_die);

  if (type_die != NULL)
    add_AT_die_ref (die: object_die, attr_kind: DW_AT_type, targ_die: type_die);
}
22405 | |
22406 | /* Given an object die, add the calling convention attribute for the |
22407 | function call type. */ |
22408 | static void |
22409 | add_calling_convention_attribute (dw_die_ref subr_die, tree decl) |
22410 | { |
22411 | enum dwarf_calling_convention value = DW_CC_normal; |
22412 | |
22413 | value = ((enum dwarf_calling_convention) |
22414 | targetm.dwarf_calling_convention (TREE_TYPE (decl))); |
22415 | |
22416 | if (is_fortran () |
22417 | && id_equal (DECL_ASSEMBLER_NAME (decl), str: "MAIN__" )) |
22418 | { |
22419 | /* DWARF 2 doesn't provide a way to identify a program's source-level |
22420 | entry point. DW_AT_calling_convention attributes are only meant |
22421 | to describe functions' calling conventions. However, lacking a |
22422 | better way to signal the Fortran main program, we used this for |
22423 | a long time, following existing custom. Now, DWARF 4 has |
22424 | DW_AT_main_subprogram, which we add below, but some tools still |
22425 | rely on the old way, which we thus keep. */ |
22426 | value = DW_CC_program; |
22427 | |
22428 | if (dwarf_version >= 4 || !dwarf_strict) |
22429 | add_AT_flag (die: subr_die, attr_kind: DW_AT_main_subprogram, flag: 1); |
22430 | } |
22431 | |
22432 | /* Only add the attribute if the backend requests it, and |
22433 | is not DW_CC_normal. */ |
22434 | if (value && (value != DW_CC_normal)) |
22435 | add_AT_unsigned (die: subr_die, attr_kind: DW_AT_calling_convention, unsigned_val: value); |
22436 | } |
22437 | |
22438 | /* Given a tree pointer to a struct, class, union, or enum type node, return |
22439 | a pointer to the (string) tag name for the given type, or zero if the type |
22440 | was declared without a tag. */ |
22441 | |
static const char *
type_tag (const_tree type)
{
  const char *name = 0;

  if (TYPE_NAME (type) != 0)
    {
      /* T holds the IDENTIFIER_NODE to fall back on when NAME is not
	 produced by the language hook below.  */
      tree t = 0;

      /* Find the IDENTIFIER_NODE for the type name.  */
      if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
	  && !TYPE_NAMELESS (type))
	t = TYPE_NAME (type);

      /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
	 a TYPE_DECL node, regardless of whether or not a `typedef' was
	 involved.  */
      else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	       && ! DECL_IGNORED_P (TYPE_NAME (type)))
	{
	  /* We want to be extra verbose.  Don't call dwarf_name if
	     DECL_NAME isn't set.  The default hook for decl_printable_name
	     doesn't like that, and in this context it's correct to return
	     0, instead of "<anonymous>" or the like.  */
	  if (DECL_NAME (TYPE_NAME (type))
	      && !DECL_NAMELESS (TYPE_NAME (type)))
	    name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
	}

      /* Now get the name as a string, or invent one.  */
      if (!name && t != 0)
	name = IDENTIFIER_POINTER (t);
    }

  /* Treat an empty string the same as no tag at all.  */
  return (name == 0 || *name == '\0') ? 0 : name;
}
22478 | |
22479 | /* Return the type associated with a data member, make a special check |
22480 | for bit field types. */ |
22481 | |
22482 | static inline tree |
22483 | member_declared_type (const_tree member) |
22484 | { |
22485 | return (DECL_BIT_FIELD_TYPE (member) |
22486 | ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member)); |
22487 | } |
22488 | |
22489 | /* Get the decl's label, as described by its RTL. This may be different |
22490 | from the DECL_NAME name used in the source file. */ |
22491 | |
22492 | #if 0 |
static const char *
decl_start_label (tree decl)
{
  rtx x;
  const char *fnname;

  /* DECL's RTL must be a MEM wrapping the function's SYMBOL_REF;
     the symbol's string is the assembler label.  */
  x = DECL_RTL (decl);
  gcc_assert (MEM_P (x));

  x = XEXP (x, 0);
  gcc_assert (GET_CODE (x) == SYMBOL_REF);

  fnname = XSTR (x, 0);
  return fnname;
}
22508 | #endif |
22509 | |
22510 | /* For variable-length arrays that have been previously generated, but |
22511 | may be incomplete due to missing subscript info, fill the subscript |
22512 | info. Return TRUE if this is one of those cases. */ |
22513 | |
static bool
fill_variable_array_bounds (tree type)
{
  /* Only already-emitted, variably-modified array types qualify.  */
  if (TREE_ASM_WRITTEN (type)
      && TREE_CODE (type) == ARRAY_TYPE
      && variably_modified_type_p (type, NULL))
    {
      dw_die_ref array_die = lookup_type_die (type);
      if (!array_die)
	return false;
      /* Ada keeps nested arrays un-collapsed; everyone else squashes
	 them into one DIE with multiple subscripts.  */
      add_subscript_info (type_die: array_die, type, collapse_p: !is_ada ());
      return true;
    }
  return false;
}
22529 | |
22530 | /* These routines generate the internal representation of the DIE's for |
22531 | the compilation unit. Debugging information is collected by walking |
22532 | the declaration trees passed in from dwarf2out_decl(). */ |
22533 | |
static void
gen_array_type_die (tree type, dw_die_ref context_die)
{
  dw_die_ref array_die;

  /* GNU compilers represent multidimensional array types as sequences of one
     dimensional array types whose element types are themselves array types.
     We sometimes squish that down to a single array_type DIE with multiple
     subscripts in the Dwarf debugging info.  The draft Dwarf specification
     say that we are allowed to do this kind of compression in C, because
     there is no difference between an array of arrays and a multidimensional
     array.  We don't do this for Ada to remain as close as possible to the
     actual representation, which is especially important against the language
     flexibilty wrt arrays of variable size.  */

  bool collapse_nested_arrays = !is_ada ();

  /* A previously-emitted incomplete VLA DIE may just need its
     subscript info filled in; nothing more to do then.  */
  if (fill_variable_array_bounds (type))
    return;

  dw_die_ref scope_die = scope_die_for (t: type, context_die);
  tree element_type;

  /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
     DW_TAG_string_type doesn't have DW_AT_type attribute).  */
  if (TREE_CODE (type) == ARRAY_TYPE
      && TYPE_STRING_FLAG (type)
      && is_fortran ()
      && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
    {
      HOST_WIDE_INT size;

      array_die = new_die (tag_value: DW_TAG_string_type, parent_die: scope_die, t: type);
      add_name_attribute (die: array_die, name_string: type_tag (type));
      equate_type_number_to_die (type, type_die: array_die);
      size = int_size_in_bytes (type);
      /* A constant length becomes a plain DW_AT_byte_size ...  */
      if (size >= 0)
	add_AT_unsigned (die: array_die, attr_kind: DW_AT_byte_size, unsigned_val: size);
      /* ??? We can't annotate types late, but for LTO we may not
	 generate a location early either (gfortran.dg/save_6.f90).  */
      else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
	       && TYPE_DOMAIN (type) != NULL_TREE
	       && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
	{
	  /* ... while a dynamic length is described via DW_AT_string_length,
	     a location expression for the object that holds the length.  */
	  tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  tree rszdecl = szdecl;

	  size = int_size_in_bytes (TREE_TYPE (szdecl));
	  if (!DECL_P (szdecl))
	    {
	      /* Look through one level of indirection to the decl that
		 actually stores the length, when possible.  */
	      if (INDIRECT_REF_P (szdecl)
		  && DECL_P (TREE_OPERAND (szdecl, 0)))
		{
		  rszdecl = TREE_OPERAND (szdecl, 0);
		  if (int_size_in_bytes (TREE_TYPE (rszdecl))
		      != DWARF2_ADDR_SIZE)
		    size = 0;
		}
	      else
		size = 0;
	    }
	  if (size > 0)
	    {
	      dw_loc_list_ref loc
		= loc_list_from_tree (loc: rszdecl, want_address: szdecl == rszdecl ? 2 : 0,
				      NULL);
	      if (loc)
		{
		  add_AT_location_description (die: array_die, attr_kind: DW_AT_string_length,
					       descr: loc);
		  /* Record the size of the length field itself when it is
		     not the default (address size).  */
		  if (size != DWARF2_ADDR_SIZE)
		    add_AT_unsigned (die: array_die, dwarf_version >= 5
					       ? DW_AT_string_length_byte_size
					       : DW_AT_byte_size, unsigned_val: size);
		}
	    }
	}
      return;
    }

  array_die = new_die (tag_value: DW_TAG_array_type, parent_die: scope_die, t: type);
  add_name_attribute (die: array_die, name_string: type_tag (type));
  equate_type_number_to_die (type, type_die: array_die);

  if (VECTOR_TYPE_P (type))
    add_AT_flag (die: array_die, attr_kind: DW_AT_GNU_vector, flag: 1);

  /* For Fortran multidimensional arrays use DW_ORD_col_major ordering.  */
  if (is_fortran ()
      && TREE_CODE (type) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
      && !TYPE_STRING_FLAG (TREE_TYPE (type)))
    add_AT_unsigned (die: array_die, attr_kind: DW_AT_ordering, unsigned_val: DW_ORD_col_major);

#if 0
  /* We default the array ordering.  Debuggers will probably do the right
     things even if DW_AT_ordering is not present.  It's not even an issue
     until we start to get into multidimensional arrays anyway.  If a debugger
     is ever caught doing the Wrong Thing for multi-dimensional arrays,
     then we'll have to put the DW_AT_ordering attribute back in.  (But if
     and when we find out that we need to put these in, we will only do so
     for multidimensional arrays.  */
  add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
#endif

  if (VECTOR_TYPE_P (type))
    {
      /* For VECTOR_TYPEs we use an array DIE with appropriate bounds.  */
      dw_die_ref subrange_die = new_die (tag_value: DW_TAG_subrange_type, parent_die: array_die, NULL);
      int lb = lower_bound_default ();
      if (lb == -1)
	lb = 0;
      add_bound_info (subrange_die, bound_attr: DW_AT_lower_bound, size_int (lb), NULL);
      add_bound_info (subrange_die, bound_attr: DW_AT_upper_bound,
		      size_int (lb + TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
    }
  else
    add_subscript_info (type_die: array_die, type, collapse_p: collapse_nested_arrays);

  /* Add representation of the type of the elements of this array type and
     emit the corresponding DIE if we haven't done it already.  */
  element_type = TREE_TYPE (type);
  if (collapse_nested_arrays)
    /* Peel inner array types to find the ultimate element type, but stop
       at Fortran strings, which keep their own DIE.  */
    while (TREE_CODE (element_type) == ARRAY_TYPE)
      {
	if (TYPE_STRING_FLAG (element_type) && is_fortran ())
	  break;
	element_type = TREE_TYPE (element_type);
      }

  add_type_attribute (object_die: array_die, type: element_type, cv_quals: TYPE_UNQUALIFIED,
		      TREE_CODE (type) == ARRAY_TYPE
		      && TYPE_REVERSE_STORAGE_ORDER (type),
		      context_die);

  add_gnat_descriptive_type_attribute (die: array_die, type, context_die);
  if (TYPE_ARTIFICIAL (type))
    add_AT_flag (die: array_die, attr_kind: DW_AT_artificial, flag: 1);

  if (get_AT (die: array_die, attr_kind: DW_AT_name))
    add_pubtype (decl: type, die: array_die);

  add_alignment_attribute (die: array_die, tree_node: type);
}
22678 | |
22679 | /* This routine generates DIE for array with hidden descriptor, details |
22680 | are filled into *info by a langhook. */ |
22681 | |
static void
gen_descr_array_type_die (tree type, struct array_descr_info *info,
			  dw_die_ref context_die)
{
  const dw_die_ref scope_die = scope_die_for (t: type, context_die);
  const dw_die_ref array_die = new_die (tag_value: DW_TAG_array_type, parent_die: scope_die, t: type);
  /* Context used to evaluate the langhook-provided bound/stride
     expressions into DWARF location expressions.  */
  struct loc_descr_context context = {
    .context_type: type,	/* context_type */
    .base_decl: info->base_decl,	/* base_decl */
    NULL,	/* dpi */
    .placeholder_arg: false,	/* placeholder_arg */
    .placeholder_seen: false,	/* placeholder_seen */
    .strict_signedness: false	/* strict_signedness */
  };
  enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
  int dim;

  add_name_attribute (die: array_die, name_string: type_tag (type));
  equate_type_number_to_die (type, type_die: array_die);

  /* Ordering only matters for more than one dimension.  */
  if (info->ndimensions > 1)
    switch (info->ordering)
      {
      case array_descr_ordering_row_major:
	add_AT_unsigned (die: array_die, attr_kind: DW_AT_ordering, unsigned_val: DW_ORD_row_major);
	break;
      case array_descr_ordering_column_major:
	add_AT_unsigned (die: array_die, attr_kind: DW_AT_ordering, unsigned_val: DW_ORD_col_major);
	break;
      default:
	break;
      }

  /* These descriptor attributes first appeared in DWARF 3.  */
  if (dwarf_version >= 3 || !dwarf_strict)
    {
      if (info->data_location)
	add_scalar_info (die: array_die, attr: DW_AT_data_location, value: info->data_location,
			 forms: dw_scalar_form_exprloc, context: &context);
      if (info->associated)
	add_scalar_info (die: array_die, attr: DW_AT_associated, value: info->associated,
			 forms: dw_scalar_form_constant
			 | dw_scalar_form_exprloc
			 | dw_scalar_form_reference, context: &context);
      if (info->allocated)
	add_scalar_info (die: array_die, attr: DW_AT_allocated, value: info->allocated,
			 forms: dw_scalar_form_constant
			 | dw_scalar_form_exprloc
			 | dw_scalar_form_reference, context: &context);
      if (info->stride)
	{
	  /* A bit stride must be a constant; a byte stride may also be
	     an expression or reference.  */
	  const enum dwarf_attribute attr
	    = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
	  const int forms
	    = (info->stride_in_bits)
	      ? dw_scalar_form_constant
	      : (dw_scalar_form_constant
		 | dw_scalar_form_exprloc
		 | dw_scalar_form_reference);

	  add_scalar_info (die: array_die, attr, value: info->stride, forms, context: &context);
	}
    }
  if (dwarf_version >= 5)
    {
      if (info->rank)
	{
	  /* Assumed-rank arrays: emit DW_AT_rank and switch the dimension
	     children to DW_TAG_generic_subrange.  */
	  add_scalar_info (die: array_die, attr: DW_AT_rank, value: info->rank,
			   forms: dw_scalar_form_constant
			   | dw_scalar_form_exprloc, context: &context);
	  subrange_tag = DW_TAG_generic_subrange;
	  context.placeholder_arg = true;
	}
    }

  add_gnat_descriptive_type_attribute (die: array_die, type, context_die);

  /* One subrange child per dimension, each with its bounds/stride.  */
  for (dim = 0; dim < info->ndimensions; dim++)
    {
      dw_die_ref subrange_die = new_die (tag_value: subrange_tag, parent_die: array_die, NULL);

      if (info->dimen[dim].bounds_type)
	add_type_attribute (object_die: subrange_die,
			    type: info->dimen[dim].bounds_type, cv_quals: TYPE_UNQUALIFIED,
			    reverse: false, context_die);
      if (info->dimen[dim].lower_bound)
	add_bound_info (subrange_die, bound_attr: DW_AT_lower_bound,
			bound: info->dimen[dim].lower_bound, context: &context);
      if (info->dimen[dim].upper_bound)
	add_bound_info (subrange_die, bound_attr: DW_AT_upper_bound,
			bound: info->dimen[dim].upper_bound, context: &context);
      if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
	add_scalar_info (die: subrange_die, attr: DW_AT_byte_stride,
			 value: info->dimen[dim].stride,
			 forms: dw_scalar_form_constant
			 | dw_scalar_form_exprloc
			 | dw_scalar_form_reference,
			 context: &context);
    }

  gen_type_die (info->element_type, context_die);
  add_type_attribute (object_die: array_die, type: info->element_type, cv_quals: TYPE_UNQUALIFIED,
		      TREE_CODE (type) == ARRAY_TYPE
		      && TYPE_REVERSE_STORAGE_ORDER (type),
		      context_die);

  if (get_AT (die: array_die, attr_kind: DW_AT_name))
    add_pubtype (decl: type, die: array_die);

  add_alignment_attribute (die: array_die, tree_node: type);
}
22792 | |
#if 0
/* Generate a DW_TAG_entry_point DIE for DECL, as a child of CONTEXT_DIE.
   This code is disabled (#if 0) and retained for reference only.  */
static void
gen_entry_point_die (tree decl, dw_die_ref context_die)
{
  /* If DECL is an inlined/cloned instance, refer back to its origin.  */
  tree origin = decl_ultimate_origin (decl);
  dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);

  if (origin != NULL)
    add_abstract_origin_attribute (decl_die, origin);
  else
    {
      /* A first-class entry point: give it a name, source coordinates,
	 and the function's return type.  */
      add_name_and_src_coords_attributes (decl_die, decl);
      add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
			  TYPE_UNQUALIFIED, false, context_die);
    }

  if (DECL_ABSTRACT_P (decl))
    equate_decl_number_to_die (decl, decl_die);
  else
    /* For a concrete instance, record the address of its entry label.  */
    add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
}
#endif
22815 | |
22816 | /* Walk through the list of incomplete types again, trying once more to |
22817 | emit full debugging info for them. */ |
22818 | |
22819 | static void |
22820 | retry_incomplete_types (void) |
22821 | { |
22822 | set_early_dwarf s; |
22823 | int i; |
22824 | |
22825 | for (i = vec_safe_length (v: incomplete_types) - 1; i >= 0; i--) |
22826 | if (should_emit_struct_debug (type: (*incomplete_types)[i], usage: DINFO_USAGE_DIR_USE)) |
22827 | gen_type_die ((*incomplete_types)[i], comp_unit_die ()); |
22828 | vec_safe_truncate (v: incomplete_types, size: 0); |
22829 | } |
22830 | |
22831 | /* Determine what tag to use for a record type. */ |
22832 | |
22833 | static enum dwarf_tag |
22834 | record_type_tag (tree type) |
22835 | { |
22836 | if (! lang_hooks.types.classify_record) |
22837 | return DW_TAG_structure_type; |
22838 | |
22839 | switch (lang_hooks.types.classify_record (type)) |
22840 | { |
22841 | case RECORD_IS_STRUCT: |
22842 | return DW_TAG_structure_type; |
22843 | |
22844 | case RECORD_IS_CLASS: |
22845 | return DW_TAG_class_type; |
22846 | |
22847 | case RECORD_IS_INTERFACE: |
22848 | if (dwarf_version >= 3 || !dwarf_strict) |
22849 | return DW_TAG_interface_type; |
22850 | return DW_TAG_structure_type; |
22851 | |
22852 | default: |
22853 | gcc_unreachable (); |
22854 | } |
22855 | } |
22856 | |
22857 | /* Generate a DIE to represent an enumeration type. Note that these DIEs |
22858 | include all of the information about the enumeration values also. Each |
22859 | enumerated type name/value is listed as a child of the enumerated type |
22860 | DIE. REVERSE is true if the type is to be interpreted in the reverse |
22861 | storage order wrt the target order. */ |
22862 | |
static dw_die_ref
gen_enumeration_type_die (tree type, dw_die_ref context_die, bool reverse)
{
  dw_die_ref type_die = lookup_type_die (type);
  /* Non-null ORIG_TYPE_DIE means we are completing or annotating an
     existing DIE rather than creating a fresh one.  */
  dw_die_ref orig_type_die = type_die;

  if (type_die == NULL || reverse)
    {
      dw_die_ref scope_die = scope_die_for (t: type, context_die);

      /* The DIE with DW_AT_endianity is placed right after the naked DIE.  */
      if (reverse)
	{
	  gcc_assert (type_die);
	  dw_die_ref after_die = type_die;
	  type_die = new_die_raw (tag_value: DW_TAG_enumeration_type);
	  add_child_die_after (die: scope_die, child_die: type_die, after_die);
	}
      else
	{
	  type_die = new_die (tag_value: DW_TAG_enumeration_type, parent_die: scope_die, t: type);
	  equate_type_number_to_die (type, type_die);
	}
      add_name_attribute (die: type_die, name_string: type_tag (type));
      /* Scoped enums get DW_AT_enum_class, a DWARF 4 addition.  */
      if ((dwarf_version >= 4 || !dwarf_strict)
	  && ENUM_IS_SCOPED (type))
	add_AT_flag (die: type_die, attr_kind: DW_AT_enum_class, flag: 1);
      /* An opaque enum with a known size is still only a declaration.  */
      if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
	add_AT_flag (die: type_die, attr_kind: DW_AT_declaration, flag: 1);
      if (!dwarf_strict)
	add_AT_unsigned (die: type_die, attr_kind: DW_AT_encoding,
			 TYPE_UNSIGNED (type)
			 ? DW_ATE_unsigned
			 : DW_ATE_signed);
      /* REVERSE means the storage order is the opposite of the target's,
	 so advertise the non-default endianity.  */
      if (reverse)
	add_AT_unsigned (die: type_die, attr_kind: DW_AT_endianity,
			 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
    }
  else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
    return type_die;
  else
    /* The type is now complete: drop any stale declaration flag.  */
    remove_AT (die: type_die, attr_kind: DW_AT_declaration);

  /* Handle a GNU C/C++ extension, i.e. incomplete enum types.  If the
     given enum type is incomplete, do not generate the DW_AT_byte_size
     attribute or the DW_AT_element_list attribute.  */
  if (TYPE_SIZE (type))
    {
      tree link;

      if (!ENUM_IS_OPAQUE (type))
	TREE_ASM_WRITTEN (type) = 1;
      /* When completing a pre-existing DIE, only add attributes that
	 are not already present.  */
      if (!orig_type_die || !get_AT (die: type_die, attr_kind: DW_AT_byte_size))
	add_byte_size_attribute (die: type_die, tree_node: type);
      if (!orig_type_die || !get_AT (die: type_die, attr_kind: DW_AT_alignment))
	add_alignment_attribute (die: type_die, tree_node: type);
      /* DWARF 3 and later can describe the underlying base type.  */
      if ((dwarf_version >= 3 || !dwarf_strict)
	  && (!orig_type_die || !get_AT (die: type_die, attr_kind: DW_AT_type)))
	{
	  tree underlying = lang_hooks.types.enum_underlying_base_type (type);
	  add_type_attribute (object_die: type_die, type: underlying, cv_quals: TYPE_UNQUALIFIED, reverse: false,
			      context_die);
	}
      if (TYPE_STUB_DECL (type) != NULL_TREE)
	{
	  if (!orig_type_die || !get_AT (die: type_die, attr_kind: DW_AT_decl_file))
	    add_src_coords_attributes (die: type_die, TYPE_STUB_DECL (type));
	  if (!orig_type_die || !get_AT (die: type_die, attr_kind: DW_AT_accessibility))
	    add_accessibility_attribute (die: type_die, TYPE_STUB_DECL (type));
	}

      /* If the first reference to this type was as the return type of an
	 inline function, then it may not have a parent.  Fix this now.  */
      if (type_die->die_parent == NULL)
	add_child_die (die: scope_die_for (t: type, context_die), child_die: type_die);

      /* Emit one DW_TAG_enumerator child per enumeration constant.  */
      for (link = TYPE_VALUES (type);
	   link != NULL; link = TREE_CHAIN (link))
	{
	  dw_die_ref enum_die = new_die (tag_value: DW_TAG_enumerator, parent_die: type_die, t: link);
	  tree value = TREE_VALUE (link);

	  if (DECL_P (value))
	    equate_decl_number_to_die (decl: value, decl_die: enum_die);

	  gcc_assert (!ENUM_IS_OPAQUE (type));
	  add_name_attribute (die: enum_die,
			      IDENTIFIER_POINTER (TREE_PURPOSE (link)));

	  /* For a CONST_DECL enumerator, the constant is its initializer.  */
	  if (TREE_CODE (value) == CONST_DECL)
	    value = DECL_INITIAL (value);

	  if (simple_type_size_in_bits (TREE_TYPE (value))
	      <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
	    {
	      /* For constant forms created by add_AT_unsigned DWARF
		 consumers (GDB, elfutils, etc.) always zero extend
		 the value.  Only when the actual value is negative
		 do we need to use add_AT_int to generate a constant
		 form that can represent negative values.  */
	      HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
	      if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
		add_AT_unsigned (die: enum_die, attr_kind: DW_AT_const_value,
				 unsigned_val: (unsigned HOST_WIDE_INT) val);
	      else
		add_AT_int (die: enum_die, attr_kind: DW_AT_const_value, int_val: val);
	    }
	  else
	    /* Enumeration constants may be wider than HOST_WIDE_INT.  Handle
	       that here.  TODO: This should be re-worked to use correct
	       signed/unsigned double tags for all cases.  */
	    add_AT_wide (die: enum_die, attr_kind: DW_AT_const_value, w: wi::to_wide (t: value));
	}

      add_gnat_descriptive_type_attribute (die: type_die, type, context_die);
      if (TYPE_ARTIFICIAL (type)
	  && (!orig_type_die || !get_AT (die: type_die, attr_kind: DW_AT_artificial)))
	add_AT_flag (die: type_die, attr_kind: DW_AT_artificial, flag: 1);
    }
  else
    /* Incomplete enum: mark it as a mere declaration.  */
    add_AT_flag (die: type_die, attr_kind: DW_AT_declaration, flag: 1);

  add_pubtype (decl: type, die: type_die);

  return type_die;
}
22989 | |
22990 | /* Generate a DIE to represent either a real live formal parameter decl or to |
22991 | represent just the type of some formal parameter position in some function |
22992 | type. |
22993 | |
22994 | Note that this routine is a bit unusual because its argument may be a |
22995 | ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which |
22996 | represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE |
22997 | node. If it's the former then this function is being called to output a |
22998 | DIE to represent a formal parameter object (or some inlining thereof). If |
22999 | it's the latter, then this function is only being called to output a |
23000 | DW_TAG_formal_parameter DIE to stand as a placeholder for some formal |
23001 | argument type of some subprogram type. |
23002 | If EMIT_NAME_P is true, name and source coordinate attributes |
23003 | are emitted. */ |
23004 | |
static dw_die_ref
gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
			  dw_die_ref context_die)
{
  /* Whichever of NODE/ORIGIN actually identifies the parameter.  */
  tree node_or_origin = node ? node : origin;
  tree ultimate_origin;
  dw_die_ref parm_die = NULL;

  if (DECL_P (node_or_origin))
    {
      parm_die = lookup_decl_die (decl: node);

      /* If the contexts differ, we may not be talking about the same
	 thing.
	 ??? When in LTO the DIE parent is the "abstract" copy and the
	 context_die is the specification "copy".  */
      if (parm_die
	  && parm_die->die_parent != context_die
	  && (parm_die->die_parent->die_tag != DW_TAG_GNU_formal_parameter_pack
	      || parm_die->die_parent->die_parent != context_die)
	  && !in_lto_p)
	{
	  gcc_assert (!DECL_ABSTRACT_P (node));
	  /* This can happen when creating a concrete instance, in
	     which case we need to create a new DIE that will get
	     annotated with DW_AT_abstract_origin.  */
	  parm_die = NULL;
	}

      /* A cached but parentless DIE: decide whether it can be adopted
	 as-is or still needs location info filled in below.  */
      if (parm_die && parm_die->die_parent == NULL)
	{
	  /* Check that parm_die already has the right attributes that
	     we would have added below.  If any attributes are
	     missing, fall through to add them.  */
	  if (! DECL_ABSTRACT_P (node_or_origin)
	      && !get_AT (die: parm_die, attr_kind: DW_AT_location)
	      && !get_AT (die: parm_die, attr_kind: DW_AT_const_value))
	    /* We are missing location info, and are about to add it.  */
	    ;
	  else
	    {
	      add_child_die (die: context_die, child_die: parm_die);
	      return parm_die;
	    }
	}
    }

  /* If we have a previously generated DIE, use it, unless this is an
     concrete instance (origin != NULL), in which case we need a new
     DIE with a corresponding DW_AT_abstract_origin.  */
  bool reusing_die;
  if (parm_die && origin == NULL)
    reusing_die = true;
  else
    {
      parm_die = new_die (tag_value: DW_TAG_formal_parameter, parent_die: context_die, t: node);
      reusing_die = false;
    }

  /* Fill in the DIE depending on whether we were handed a decl or a
     bare ..._TYPE node (see the function comment above).  */
  switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
    {
    case tcc_declaration:
      ultimate_origin = decl_ultimate_origin (decl: node_or_origin);
      if (node || ultimate_origin)
	origin = ultimate_origin;

      /* A reused DIE already has its static attributes; only the
	 location remains to be added.  */
      if (reusing_die)
	goto add_location;

      if (origin != NULL)
	add_abstract_origin_attribute (die: parm_die, origin);
      else if (emit_name_p)
	add_name_and_src_coords_attributes (die: parm_die, decl: node);
      /* Emit a type even on a concrete instance when the type is
	 variably modified, since it then differs per instance.  */
      if (origin == NULL
	  || (! DECL_ABSTRACT_P (node_or_origin)
	      && variably_modified_type_p (TREE_TYPE (node_or_origin),
					   decl_function_context
					   (node_or_origin))))
	{
	  tree type = TREE_TYPE (node_or_origin);
	  if (decl_by_reference_p (decl: node_or_origin))
	    add_type_attribute (object_die: parm_die, TREE_TYPE (type),
				cv_quals: TYPE_UNQUALIFIED,
				reverse: false, context_die);
	  else
	    add_type_attribute (object_die: parm_die, type,
				cv_quals: decl_quals (decl: node_or_origin),
				reverse: false, context_die);
	}
      if (origin == NULL && DECL_ARTIFICIAL (node))
	add_AT_flag (die: parm_die, attr_kind: DW_AT_artificial, flag: 1);
    add_location:
      if (node && node != origin)
	equate_decl_number_to_die (decl: node, decl_die: parm_die);
      if (! DECL_ABSTRACT_P (node_or_origin))
	add_location_or_const_value_attribute (die: parm_die, decl: node_or_origin,
					       cache_p: node == NULL);

      break;

    case tcc_type:
      /* We were called with some kind of a ..._TYPE node.  */
      add_type_attribute (object_die: parm_die, type: node_or_origin, cv_quals: TYPE_UNQUALIFIED, reverse: false,
			  context_die);
      break;

    default:
      gcc_unreachable ();
    }

  return parm_die;
}
23117 | |
23118 | /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate |
23119 | children DW_TAG_formal_parameter DIEs representing the arguments of the |
23120 | parameter pack. |
23121 | |
23122 | PARM_PACK must be a function parameter pack. |
23123 | PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN |
23124 | must point to the subsequent arguments of the function PACK_ARG belongs to. |
23125 | SUBR_DIE is the DIE of the function PACK_ARG belongs to. |
23126 | If NEXT_ARG is non NULL, *NEXT_ARG is set to the function argument |
23127 | following the last one for which a DIE was generated. */ |
23128 | |
static dw_die_ref
gen_formal_parameter_pack_die (tree parm_pack,
			       tree pack_arg,
			       dw_die_ref subr_die,
			       tree *next_arg)
{
  tree arg;
  dw_die_ref parm_pack_die;

  gcc_assert (parm_pack
	      && lang_hooks.function_parameter_pack_p (parm_pack)
	      && subr_die);

  parm_pack_die = new_die (tag_value: DW_TAG_GNU_formal_parameter_pack, parent_die: subr_die, t: parm_pack);
  add_src_coords_attributes (die: parm_pack_die, decl: parm_pack);

  /* Emit a nameless DW_TAG_formal_parameter child for each argument
     that was expanded from PARM_PACK; stop at the first one that was
     not.  */
  for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
    {
      if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
								 parm_pack))
	break;
      gen_formal_parameter_die (node: arg, NULL,
				emit_name_p: false /* Don't emit name attribute.  */,
				context_die: parm_pack_die);
    }
  /* Report where the caller should resume walking the argument list.  */
  if (next_arg)
    *next_arg = arg;
  return parm_pack_die;
}
23158 | |
23159 | /* Generate a special type of DIE used as a stand-in for a trailing ellipsis |
23160 | at the end of an (ANSI prototyped) formal parameters list. */ |
23161 | |
23162 | static void |
23163 | gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die) |
23164 | { |
23165 | new_die (tag_value: DW_TAG_unspecified_parameters, parent_die: context_die, t: decl_or_type); |
23166 | } |
23167 | |
23168 | /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a |
23169 | DW_TAG_unspecified_parameters DIE) to represent the types of the formal |
23170 | parameters as specified in some function type specification (except for |
23171 | those which appear as part of a function *definition*). */ |
23172 | |
static void
gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
{
  tree link;
  tree formal_type = NULL;
  tree first_parm_type;
  tree arg;

  /* If handed a FUNCTION_DECL, walk its argument decls in parallel with
     the type list so DECL_ARTIFICIAL parameters can be flagged.  */
  if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
    {
      arg = DECL_ARGUMENTS (function_or_method_type);
      function_or_method_type = TREE_TYPE (function_or_method_type);
    }
  else
    arg = NULL_TREE;

  first_parm_type = TYPE_ARG_TYPES (function_or_method_type);

  /* Make our first pass over the list of formal parameter types and output a
     DW_TAG_formal_parameter DIE for each one.  */
  for (link = first_parm_type; link; )
    {
      dw_die_ref parm_die;

      formal_type = TREE_VALUE (link);
      /* A trailing void_type_node marks a prototype with no ellipsis.  */
      if (formal_type == void_type_node)
	break;

      /* Output a (nameless) DIE to represent the formal parameter itself.  */
      parm_die = gen_formal_parameter_die (node: formal_type, NULL,
					   emit_name_p: true /* Emit name attribute.  */,
					   context_die);
      /* The first parameter of a METHOD_TYPE is the implicit object
	 pointer ("this"): mark it artificial and, when the DWARF level
	 allows, point DW_AT_object_pointer at it.  */
      if (TREE_CODE (function_or_method_type) == METHOD_TYPE
	  && link == first_parm_type)
	{
	  add_AT_flag (die: parm_die, attr_kind: DW_AT_artificial, flag: 1);
	  if (dwarf_version >= 3 || !dwarf_strict)
	    add_AT_die_ref (die: context_die, attr_kind: DW_AT_object_pointer, targ_die: parm_die);
	}
      else if (arg && DECL_ARTIFICIAL (arg))
	add_AT_flag (die: parm_die, attr_kind: DW_AT_artificial, flag: 1);

      link = TREE_CHAIN (link);
      if (arg)
	arg = DECL_CHAIN (arg);
    }

  /* If this function type has an ellipsis, add a
     DW_TAG_unspecified_parameters DIE to the end of the parameter list.  */
  if (formal_type != void_type_node)
    gen_unspecified_parameters_die (decl_or_type: function_or_method_type, context_die);

  /* Make our second (and final) pass over the list of formal parameter types
     and output DIEs to represent those types (as necessary).  */
  for (link = TYPE_ARG_TYPES (function_or_method_type);
       link && TREE_VALUE (link);
       link = TREE_CHAIN (link))
    gen_type_die (TREE_VALUE (link), context_die);
}
23232 | |
23233 | /* We want to generate the DIE for TYPE so that we can generate the |
23234 | die for MEMBER, which has been defined; we will need to refer back |
23235 | to the member declaration nested within TYPE. If we're trying to |
23236 | generate minimal debug info for TYPE, processing TYPE won't do the |
23237 | trick; we need to attach the member declaration by hand. */ |
23238 | |
static void
gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
{
  gen_type_die (type, context_die);

  /* If we're trying to avoid duplicate debug info, we may not have
     emitted the member decl for this function.  Emit it now.  */
  if (TYPE_STUB_DECL (type)
      && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
      && ! lookup_decl_die (decl: member))
    {
      dw_die_ref type_die;
      gcc_assert (!decl_ultimate_origin (member));

      type_die = lookup_type_die_strip_naming_typedef (type);
      /* Dispatch on the member kind: method, data member, or static
	 member variable.  */
      if (TREE_CODE (member) == FUNCTION_DECL)
	gen_subprogram_die (member, type_die);
      else if (TREE_CODE (member) == FIELD_DECL)
	{
	  /* Ignore the nameless fields that are used to skip bits but handle
	     C++ anonymous unions and structs.  */
	  if (DECL_NAME (member) != NULL_TREE
	      || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
	      || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
	    {
	      struct vlr_context vlr_ctx = {
		DECL_CONTEXT (member), /* struct_type */
		NULL_TREE /* variant_part_offset */
	      };
	      /* Make sure the field's type has a DIE before the field.  */
	      gen_type_die (member_declared_type (member), type_die);
	      gen_field_die (member, &vlr_ctx, type_die);
	    }
	}
      else
	gen_variable_die (member, NULL_TREE, type_die);
    }
}
23276 | |
23277 | /* Forward declare these functions, because they are mutually recursive |
23278 | with their set_block_* pairing functions. */ |
23279 | static void set_decl_origin_self (tree); |
23280 | |
23281 | /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the |
23282 | given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so |
23283 | that it points to the node itself, thus indicating that the node is its |
23284 | own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for |
23285 | the given node is NULL, recursively descend the decl/block tree which |
23286 | it is the root of, and for each other ..._DECL or BLOCK node contained |
23287 | therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also |
23288 | still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN |
23289 | values to point to themselves. */ |
23290 | |
23291 | static void |
23292 | set_block_origin_self (tree stmt) |
23293 | { |
23294 | if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE) |
23295 | { |
23296 | BLOCK_ABSTRACT_ORIGIN (stmt) = stmt; |
23297 | |
23298 | { |
23299 | tree local_decl; |
23300 | |
23301 | for (local_decl = BLOCK_VARS (stmt); |
23302 | local_decl != NULL_TREE; |
23303 | local_decl = DECL_CHAIN (local_decl)) |
23304 | /* Do not recurse on nested functions since the inlining status |
23305 | of parent and child can be different as per the DWARF spec. */ |
23306 | if (TREE_CODE (local_decl) != FUNCTION_DECL |
23307 | && !DECL_EXTERNAL (local_decl)) |
23308 | set_decl_origin_self (local_decl); |
23309 | } |
23310 | |
23311 | { |
23312 | tree subblock; |
23313 | |
23314 | for (subblock = BLOCK_SUBBLOCKS (stmt); |
23315 | subblock != NULL_TREE; |
23316 | subblock = BLOCK_CHAIN (subblock)) |
23317 | set_block_origin_self (subblock); /* Recurse. */ |
23318 | } |
23319 | } |
23320 | } |
23321 | |
23322 | /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for |
23323 | the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the |
23324 | node to so that it points to the node itself, thus indicating that the |
23325 | node represents its own (abstract) origin. Additionally, if the |
23326 | DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend |
23327 | the decl/block tree of which the given node is the root of, and for |
23328 | each other ..._DECL or BLOCK node contained therein whose |
23329 | DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL, |
23330 | set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to |
23331 | point to themselves. */ |
23332 | |
23333 | static void |
23334 | set_decl_origin_self (tree decl) |
23335 | { |
23336 | if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE) |
23337 | { |
23338 | DECL_ABSTRACT_ORIGIN (decl) = decl; |
23339 | if (TREE_CODE (decl) == FUNCTION_DECL) |
23340 | { |
23341 | tree arg; |
23342 | |
23343 | for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg)) |
23344 | DECL_ABSTRACT_ORIGIN (arg) = arg; |
23345 | if (DECL_INITIAL (decl) != NULL_TREE |
23346 | && DECL_INITIAL (decl) != error_mark_node) |
23347 | set_block_origin_self (DECL_INITIAL (decl)); |
23348 | } |
23349 | } |
23350 | } |
23351 | |
23352 | /* Mark the early DIE for DECL as the abstract instance. */ |
23353 | |
static void
dwarf2out_abstract_function (tree decl)
{
  dw_die_ref old_die;

  /* Make sure we have the actual abstract inline, not a clone.  */
  decl = DECL_ORIGIN (decl);

  if (DECL_IGNORED_P (decl))
    return;

  /* In LTO we're all set.  We already created abstract instances
     early and we want to avoid creating a concrete instance of that
     if we don't output it.  */
  if (in_lto_p)
    return;

  old_die = lookup_decl_die (decl);
  gcc_assert (old_die != NULL);
  if (get_AT (die: old_die, attr_kind: DW_AT_inline))
    /* We've already generated the abstract instance.  */
    return;

  /* Go ahead and put DW_AT_inline on the DIE.  The value encodes both
     whether the function was declared inline and whether cgraph thinks
     it may actually have been inlined.  */
  if (DECL_DECLARED_INLINE_P (decl))
    {
      if (cgraph_function_possibly_inlined_p (decl))
	add_AT_unsigned (die: old_die, attr_kind: DW_AT_inline, unsigned_val: DW_INL_declared_inlined);
      else
	add_AT_unsigned (die: old_die, attr_kind: DW_AT_inline, unsigned_val: DW_INL_declared_not_inlined);
    }
  else
    {
      if (cgraph_function_possibly_inlined_p (decl))
	add_AT_unsigned (die: old_die, attr_kind: DW_AT_inline, unsigned_val: DW_INL_inlined);
      else
	add_AT_unsigned (die: old_die, attr_kind: DW_AT_inline, unsigned_val: DW_INL_not_inlined);
    }

  /* An "artificial"-attributed inline also gets DW_AT_artificial.  */
  if (DECL_DECLARED_INLINE_P (decl)
      && lookup_attribute (attr_name: "artificial" , DECL_ATTRIBUTES (decl)))
    add_AT_flag (die: old_die, attr_kind: DW_AT_artificial, flag: 1);

  /* Make DECL (and everything beneath it) its own abstract origin.  */
  set_decl_origin_self (decl);
}
23399 | |
/* Helper function of premark_used_types () which gets called through
   hash_set traversal.

   Marks the DIE of the given TYPE as perennial, so it never gets
   marked as unused by prune_unused_types.  */
23405 | |
23406 | bool |
23407 | (tree const &type, void *) |
23408 | { |
23409 | dw_die_ref die; |
23410 | |
23411 | die = lookup_type_die (type); |
23412 | if (die != NULL) |
23413 | die->die_perennial_p = 1; |
23414 | return true; |
23415 | } |
23416 | |
23417 | /* Helper function of premark_types_used_by_global_vars which gets called |
23418 | through htab_traverse. |
23419 | |
23420 | Marks the DIE of a given type in *SLOT as perennial, so it never gets |
23421 | marked as unused by prune_unused_types. The DIE of the type is marked |
23422 | only if the global variable using the type will actually be emitted. */ |
23423 | |
23424 | int |
23425 | (types_used_by_vars_entry **slot, |
23426 | void *) |
23427 | { |
23428 | struct types_used_by_vars_entry *entry; |
23429 | dw_die_ref die; |
23430 | |
23431 | entry = (struct types_used_by_vars_entry *) *slot; |
23432 | gcc_assert (entry->type != NULL |
23433 | && entry->var_decl != NULL); |
23434 | die = lookup_type_die (type: entry->type); |
23435 | if (die) |
23436 | { |
23437 | /* Ask cgraph if the global variable really is to be emitted. |
23438 | If yes, then we'll keep the DIE of ENTRY->TYPE. */ |
23439 | varpool_node *node = varpool_node::get (decl: entry->var_decl); |
23440 | if (node && node->definition) |
23441 | { |
23442 | die->die_perennial_p = 1; |
23443 | /* Keep the parent DIEs as well. */ |
23444 | while ((die = die->die_parent) && die->die_perennial_p == 0) |
23445 | die->die_perennial_p = 1; |
23446 | } |
23447 | } |
23448 | return 1; |
23449 | } |
23450 | |
23451 | /* Mark all members of used_types_hash as perennial. */ |
23452 | |
23453 | static void |
23454 | (struct function *fun) |
23455 | { |
23456 | if (fun && fun->used_types_hash) |
23457 | fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL); |
23458 | } |
23459 | |
23460 | /* Mark all members of types_used_by_vars_entry as perennial. */ |
23461 | |
23462 | static void |
23463 | (void) |
23464 | { |
23465 | if (types_used_by_vars_hash) |
23466 | types_used_by_vars_hash |
23467 | ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL); |
23468 | } |
23469 | |
23470 | /* Mark all variables used by the symtab as perennial. */ |
23471 | |
23472 | static void |
23473 | (void) |
23474 | { |
23475 | /* Mark DIEs in the symtab as used. */ |
23476 | varpool_node *var; |
23477 | FOR_EACH_VARIABLE (var) |
23478 | { |
23479 | dw_die_ref die = lookup_decl_die (decl: var->decl); |
23480 | if (die) |
23481 | die->die_perennial_p = 1; |
23482 | } |
23483 | } |
23484 | |
23485 | /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE |
23486 | for CA_LOC call arg loc node. */ |
23487 | |
static dw_die_ref
gen_call_site_die (tree decl, dw_die_ref subr_die,
		   struct call_arg_loc_node *ca_loc)
{
  dw_die_ref stmt_die = NULL, die;
  tree block = ca_loc->block;

  /* Walk up the lexical blocks from the call site to find the nearest
     enclosing block that already has a DIE; the call-site DIE is placed
     under it.  */
  while (block
	 && block != DECL_INITIAL (decl)
	 && TREE_CODE (block) == BLOCK)
    {
      stmt_die = lookup_block_die (block);
      if (stmt_die)
	break;
      block = BLOCK_SUPERCONTEXT (block);
    }
  /* No enclosing block DIE: hang the call site off the subprogram.  */
  if (stmt_die == NULL)
    stmt_die = subr_die;
  die = new_die (tag_value: dwarf_TAG (tag: DW_TAG_call_site), parent_die: stmt_die, NULL_TREE);
  add_AT_lbl_id (die, attr_kind: dwarf_AT (at: DW_AT_call_return_pc), lbl_id: ca_loc->label);
  if (ca_loc->tail_call_p)
    add_AT_flag (die, attr_kind: dwarf_AT (at: DW_AT_call_tail_call), flag: 1);
  /* For a direct call, record the callee: by DIE reference when the
     callee's DIE exists, otherwise by address.  */
  if (ca_loc->symbol_ref)
    {
      dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
      if (tdie)
	add_AT_die_ref (die, attr_kind: dwarf_AT (at: DW_AT_call_origin), targ_die: tdie);
      else
	add_AT_addr (die, attr_kind: dwarf_AT (at: DW_AT_call_origin), addr: ca_loc->symbol_ref,
		     force_direct: false);
    }
  return die;
}
23521 | |
23522 | /* Generate a DIE to represent a declared function (either file-scope or |
23523 | block-local). */ |
23524 | |
23525 | static void |
23526 | gen_subprogram_die (tree decl, dw_die_ref context_die) |
23527 | { |
23528 | tree origin = decl_ultimate_origin (decl); |
23529 | dw_die_ref subr_die; |
23530 | dw_die_ref old_die = lookup_decl_die (decl); |
23531 | bool old_die_had_no_children = false; |
23532 | |
23533 | /* This function gets called multiple times for different stages of |
23534 | the debug process. For example, for func() in this code: |
23535 | |
23536 | namespace S |
23537 | { |
23538 | void func() { ... } |
23539 | } |
23540 | |
23541 | ...we get called 4 times. Twice in early debug and twice in |
23542 | late debug: |
23543 | |
23544 | Early debug |
23545 | ----------- |
23546 | |
23547 | 1. Once while generating func() within the namespace. This is |
23548 | the declaration. The declaration bit below is set, as the |
23549 | context is the namespace. |
23550 | |
23551 | A new DIE will be generated with DW_AT_declaration set. |
23552 | |
23553 | 2. Once for func() itself. This is the specification. The |
23554 | declaration bit below is clear as the context is the CU. |
23555 | |
23556 | We will use the cached DIE from (1) to create a new DIE with |
23557 | DW_AT_specification pointing to the declaration in (1). |
23558 | |
23559 | Late debug via rest_of_handle_final() |
23560 | ------------------------------------- |
23561 | |
23562 | 3. Once generating func() within the namespace. This is also the |
23563 | declaration, as in (1), but this time we will early exit below |
23564 | as we have a cached DIE and a declaration needs no additional |
23565 | annotations (no locations), as the source declaration line |
23566 | info is enough. |
23567 | |
23568 | 4. Once for func() itself. As in (2), this is the specification, |
23569 | but this time we will re-use the cached DIE, and just annotate |
23570 | it with the location information that should now be available. |
23571 | |
23572 | For something without namespaces, but with abstract instances, we |
     are also called multiple times:
23574 | |
23575 | class Base |
23576 | { |
23577 | public: |
23578 | Base (); // constructor declaration (1) |
23579 | }; |
23580 | |
23581 | Base::Base () { } // constructor specification (2) |
23582 | |
23583 | Early debug |
23584 | ----------- |
23585 | |
23586 | 1. Once for the Base() constructor by virtue of it being a |
23587 | member of the Base class. This is done via |
23588 | rest_of_type_compilation. |
23589 | |
23590 | This is a declaration, so a new DIE will be created with |
23591 | DW_AT_declaration. |
23592 | |
23593 | 2. Once for the Base() constructor definition, but this time |
23594 | while generating the abstract instance of the base |
23595 | constructor (__base_ctor) which is being generated via early |
23596 | debug of reachable functions. |
23597 | |
23598 | Even though we have a cached version of the declaration (1), |
23599 | we will create a DW_AT_specification of the declaration DIE |
23600 | in (1). |
23601 | |
23602 | 3. Once for the __base_ctor itself, but this time, we generate |
        a DW_AT_abstract_origin version of the DW_AT_specification in
23604 | (2). |
23605 | |
23606 | Late debug via rest_of_handle_final |
23607 | ----------------------------------- |
23608 | |
23609 | 4. One final time for the __base_ctor (which will have a cached |
        DIE with DW_AT_abstract_origin created in (3)).  This time,
23611 | we will just annotate the location information now |
23612 | available. |
23613 | */ |
23614 | int declaration = (current_function_decl != decl |
23615 | || (!DECL_INITIAL (decl) && !origin) |
23616 | || class_or_namespace_scope_p (context_die)); |
23617 | |
23618 | /* A declaration that has been previously dumped needs no |
23619 | additional information. */ |
23620 | if (old_die && declaration) |
23621 | return; |
23622 | |
23623 | if (in_lto_p && old_die && old_die->die_child == NULL) |
23624 | old_die_had_no_children = true; |
23625 | |
23626 | /* Now that the C++ front end lazily declares artificial member fns, we |
23627 | might need to retrofit the declaration into its class. */ |
23628 | if (!declaration && !origin && !old_die |
23629 | && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl)) |
23630 | && !class_or_namespace_scope_p (context_die) |
23631 | && debug_info_level > DINFO_LEVEL_TERSE) |
23632 | old_die = force_decl_die (decl); |
23633 | |
23634 | /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */ |
23635 | if (origin != NULL) |
23636 | { |
23637 | gcc_assert (!declaration || local_scope_p (context_die)); |
23638 | |
23639 | /* Fixup die_parent for the abstract instance of a nested |
23640 | inline function. */ |
23641 | if (old_die && old_die->die_parent == NULL) |
23642 | add_child_die (die: context_die, child_die: old_die); |
23643 | |
23644 | if (old_die && get_AT_ref (die: old_die, attr_kind: DW_AT_abstract_origin)) |
23645 | { |
23646 | /* If we have a DW_AT_abstract_origin we have a working |
23647 | cached version. */ |
23648 | subr_die = old_die; |
23649 | } |
23650 | else |
23651 | { |
23652 | subr_die = new_die (tag_value: DW_TAG_subprogram, parent_die: context_die, t: decl); |
23653 | add_abstract_origin_attribute (die: subr_die, origin); |
23654 | /* This is where the actual code for a cloned function is. |
23655 | Let's emit linkage name attribute for it. This helps |
23656 | debuggers to e.g, set breakpoints into |
23657 | constructors/destructors when the user asks "break |
23658 | K::K". */ |
23659 | add_linkage_name (die: subr_die, decl); |
23660 | } |
23661 | } |
23662 | /* A cached copy, possibly from early dwarf generation. Reuse as |
23663 | much as possible. */ |
23664 | else if (old_die) |
23665 | { |
23666 | if (!get_AT_flag (die: old_die, attr_kind: DW_AT_declaration) |
23667 | /* We can have a normal definition following an inline one in the |
23668 | case of redefinition of GNU C extern inlines. |
23669 | It seems reasonable to use AT_specification in this case. */ |
23670 | && !get_AT (die: old_die, attr_kind: DW_AT_inline)) |
23671 | { |
23672 | /* Detect and ignore this case, where we are trying to output |
23673 | something we have already output. */ |
23674 | if (get_AT (die: old_die, attr_kind: DW_AT_low_pc) |
23675 | || get_AT (die: old_die, attr_kind: DW_AT_ranges)) |
23676 | return; |
23677 | |
23678 | /* If we have no location information, this must be a |
23679 | partially generated DIE from early dwarf generation. |
23680 | Fall through and generate it. */ |
23681 | } |
23682 | |
23683 | /* If the definition comes from the same place as the declaration, |
23684 | maybe use the old DIE. We always want the DIE for this function |
23685 | that has the *_pc attributes to be under comp_unit_die so the |
23686 | debugger can find it. We also need to do this for abstract |
23687 | instances of inlines, since the spec requires the out-of-line copy |
23688 | to have the same parent. For local class methods, this doesn't |
23689 | apply; we just use the old DIE. */ |
23690 | expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl)); |
23691 | struct dwarf_file_data * file_index = lookup_filename (s.file); |
23692 | if (((is_unit_die (c: old_die->die_parent) |
23693 | /* This condition fixes the inconsistency/ICE with the |
23694 | following Fortran test (or some derivative thereof) while |
23695 | building libgfortran: |
23696 | |
23697 | module some_m |
23698 | contains |
23699 | logical function funky (FLAG) |
23700 | funky = .true. |
23701 | end function |
23702 | end module |
23703 | */ |
23704 | || (old_die->die_parent |
23705 | && old_die->die_parent->die_tag == DW_TAG_module) |
23706 | || local_scope_p (context_die: old_die->die_parent) |
23707 | || context_die == NULL) |
23708 | && (DECL_ARTIFICIAL (decl) |
23709 | || (get_AT_file (die: old_die, attr_kind: DW_AT_decl_file) == file_index |
23710 | && (get_AT_unsigned (die: old_die, attr_kind: DW_AT_decl_line) |
23711 | == (unsigned) s.line) |
23712 | && (!debug_column_info |
23713 | || s.column == 0 |
23714 | || (get_AT_unsigned (die: old_die, attr_kind: DW_AT_decl_column) |
23715 | == (unsigned) s.column))))) |
23716 | /* With LTO if there's an abstract instance for |
23717 | the old DIE, this is a concrete instance and |
23718 | thus re-use the DIE. */ |
23719 | || get_AT (die: old_die, attr_kind: DW_AT_abstract_origin)) |
23720 | { |
23721 | subr_die = old_die; |
23722 | |
23723 | /* Clear out the declaration attribute, but leave the |
23724 | parameters so they can be augmented with location |
23725 | information later. Unless this was a declaration, in |
23726 | which case, wipe out the nameless parameters and recreate |
23727 | them further down. */ |
23728 | if (remove_AT (die: subr_die, attr_kind: DW_AT_declaration)) |
23729 | { |
23730 | |
23731 | remove_AT (die: subr_die, attr_kind: DW_AT_object_pointer); |
23732 | remove_child_TAG (die: subr_die, tag: DW_TAG_formal_parameter); |
23733 | } |
23734 | } |
23735 | /* Make a specification pointing to the previously built |
23736 | declaration. */ |
23737 | else |
23738 | { |
23739 | subr_die = new_die (tag_value: DW_TAG_subprogram, parent_die: context_die, t: decl); |
23740 | add_AT_specification (die: subr_die, targ_die: old_die); |
23741 | add_pubname (decl, die: subr_die); |
23742 | if (get_AT_file (die: old_die, attr_kind: DW_AT_decl_file) != file_index) |
23743 | add_AT_file (die: subr_die, attr_kind: DW_AT_decl_file, fd: file_index); |
23744 | if (get_AT_unsigned (die: old_die, attr_kind: DW_AT_decl_line) != (unsigned) s.line) |
23745 | add_AT_unsigned (die: subr_die, attr_kind: DW_AT_decl_line, unsigned_val: s.line); |
23746 | if (debug_column_info |
23747 | && s.column |
23748 | && (get_AT_unsigned (die: old_die, attr_kind: DW_AT_decl_column) |
23749 | != (unsigned) s.column)) |
23750 | add_AT_unsigned (die: subr_die, attr_kind: DW_AT_decl_column, unsigned_val: s.column); |
23751 | |
23752 | /* If the prototype had an 'auto' or 'decltype(auto)' in |
23753 | the return type, emit the real type on the definition die. */ |
23754 | if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE) |
23755 | { |
23756 | dw_die_ref die = get_AT_ref (die: old_die, attr_kind: DW_AT_type); |
23757 | while (die |
23758 | && (die->die_tag == DW_TAG_reference_type |
23759 | || die->die_tag == DW_TAG_rvalue_reference_type |
23760 | || die->die_tag == DW_TAG_pointer_type |
23761 | || die->die_tag == DW_TAG_const_type |
23762 | || die->die_tag == DW_TAG_volatile_type |
23763 | || die->die_tag == DW_TAG_restrict_type |
23764 | || die->die_tag == DW_TAG_array_type |
23765 | || die->die_tag == DW_TAG_ptr_to_member_type |
23766 | || die->die_tag == DW_TAG_subroutine_type)) |
23767 | die = get_AT_ref (die, attr_kind: DW_AT_type); |
23768 | if (die == auto_die || die == decltype_auto_die) |
23769 | add_type_attribute (object_die: subr_die, TREE_TYPE (TREE_TYPE (decl)), |
23770 | cv_quals: TYPE_UNQUALIFIED, reverse: false, context_die); |
23771 | } |
23772 | |
23773 | /* When we process the method declaration, we haven't seen |
23774 | the out-of-class defaulted definition yet, so we have to |
23775 | recheck now. */ |
23776 | if ((dwarf_version >= 5 || ! dwarf_strict) |
23777 | && !get_AT (die: subr_die, attr_kind: DW_AT_defaulted)) |
23778 | { |
23779 | int defaulted |
23780 | = lang_hooks.decls.decl_dwarf_attribute (decl, |
23781 | DW_AT_defaulted); |
23782 | if (defaulted != -1) |
23783 | { |
23784 | /* Other values must have been handled before. */ |
23785 | gcc_assert (defaulted == DW_DEFAULTED_out_of_class); |
23786 | add_AT_unsigned (die: subr_die, attr_kind: DW_AT_defaulted, unsigned_val: defaulted); |
23787 | } |
23788 | } |
23789 | } |
23790 | } |
23791 | /* Create a fresh DIE for anything else. */ |
23792 | else |
23793 | { |
23794 | subr_die = new_die (tag_value: DW_TAG_subprogram, parent_die: context_die, t: decl); |
23795 | |
23796 | if (TREE_PUBLIC (decl)) |
23797 | add_AT_flag (die: subr_die, attr_kind: DW_AT_external, flag: 1); |
23798 | |
23799 | add_name_and_src_coords_attributes (die: subr_die, decl); |
23800 | add_pubname (decl, die: subr_die); |
23801 | if (debug_info_level > DINFO_LEVEL_TERSE) |
23802 | { |
23803 | add_prototyped_attribute (die: subr_die, TREE_TYPE (decl)); |
23804 | add_type_attribute (object_die: subr_die, TREE_TYPE (TREE_TYPE (decl)), |
23805 | cv_quals: TYPE_UNQUALIFIED, reverse: false, context_die); |
23806 | } |
23807 | |
23808 | add_pure_or_virtual_attribute (die: subr_die, func_decl: decl); |
23809 | if (DECL_ARTIFICIAL (decl)) |
23810 | add_AT_flag (die: subr_die, attr_kind: DW_AT_artificial, flag: 1); |
23811 | |
23812 | if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict)) |
23813 | add_AT_flag (die: subr_die, attr_kind: DW_AT_noreturn, flag: 1); |
23814 | |
23815 | add_alignment_attribute (die: subr_die, tree_node: decl); |
23816 | |
23817 | add_accessibility_attribute (die: subr_die, decl); |
23818 | } |
23819 | |
23820 | /* Unless we have an existing non-declaration DIE, equate the new |
23821 | DIE. */ |
23822 | if (!old_die || is_declaration_die (die: old_die)) |
23823 | equate_decl_number_to_die (decl, decl_die: subr_die); |
23824 | |
23825 | if (declaration) |
23826 | { |
23827 | if (!old_die || !get_AT (die: old_die, attr_kind: DW_AT_inline)) |
23828 | { |
23829 | add_AT_flag (die: subr_die, attr_kind: DW_AT_declaration, flag: 1); |
23830 | |
23831 | /* If this is an explicit function declaration then generate |
23832 | a DW_AT_explicit attribute. */ |
23833 | if ((dwarf_version >= 3 || !dwarf_strict) |
23834 | && lang_hooks.decls.decl_dwarf_attribute (decl, |
23835 | DW_AT_explicit) == 1) |
23836 | add_AT_flag (die: subr_die, attr_kind: DW_AT_explicit, flag: 1); |
23837 | |
23838 | /* If this is a C++11 deleted special function member then generate |
23839 | a DW_AT_deleted attribute. */ |
23840 | if ((dwarf_version >= 5 || !dwarf_strict) |
23841 | && lang_hooks.decls.decl_dwarf_attribute (decl, |
23842 | DW_AT_deleted) == 1) |
23843 | add_AT_flag (die: subr_die, attr_kind: DW_AT_deleted, flag: 1); |
23844 | |
23845 | /* If this is a C++11 defaulted special function member then |
23846 | generate a DW_AT_defaulted attribute. */ |
23847 | if (dwarf_version >= 5 || !dwarf_strict) |
23848 | { |
23849 | int defaulted |
23850 | = lang_hooks.decls.decl_dwarf_attribute (decl, |
23851 | DW_AT_defaulted); |
23852 | if (defaulted != -1) |
23853 | add_AT_unsigned (die: subr_die, attr_kind: DW_AT_defaulted, unsigned_val: defaulted); |
23854 | } |
23855 | |
23856 | /* If this is a C++11 non-static member function with & ref-qualifier |
23857 | then generate a DW_AT_reference attribute. */ |
23858 | if ((dwarf_version >= 5 || !dwarf_strict) |
23859 | && lang_hooks.decls.decl_dwarf_attribute (decl, |
23860 | DW_AT_reference) == 1) |
23861 | add_AT_flag (die: subr_die, attr_kind: DW_AT_reference, flag: 1); |
23862 | |
23863 | /* If this is a C++11 non-static member function with && |
23864 | ref-qualifier then generate a DW_AT_reference attribute. */ |
23865 | if ((dwarf_version >= 5 || !dwarf_strict) |
23866 | && lang_hooks.decls.decl_dwarf_attribute (decl, |
23867 | DW_AT_rvalue_reference) |
23868 | == 1) |
23869 | add_AT_flag (die: subr_die, attr_kind: DW_AT_rvalue_reference, flag: 1); |
23870 | } |
23871 | } |
23872 | /* For non DECL_EXTERNALs, if range information is available, fill |
23873 | the DIE with it. */ |
23874 | else if (!DECL_EXTERNAL (decl) && !early_dwarf) |
23875 | { |
23876 | HOST_WIDE_INT cfa_fb_offset; |
23877 | |
23878 | struct function *fun = DECL_STRUCT_FUNCTION (decl); |
23879 | |
23880 | if (!crtl->has_bb_partition) |
23881 | { |
23882 | dw_fde_ref fde = fun->fde; |
23883 | if (fde->dw_fde_begin) |
23884 | { |
23885 | /* We have already generated the labels. */ |
23886 | add_AT_low_high_pc (die: subr_die, lbl_low: fde->dw_fde_begin, |
23887 | lbl_high: fde->dw_fde_end, force_direct: false); |
23888 | } |
23889 | else |
23890 | { |
23891 | /* Create start/end labels and add the range. */ |
23892 | char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES]; |
23893 | char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES]; |
23894 | ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL, |
23895 | current_function_funcdef_no); |
23896 | ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL, |
23897 | current_function_funcdef_no); |
23898 | add_AT_low_high_pc (die: subr_die, lbl_low: label_id_low, lbl_high: label_id_high, |
23899 | force_direct: false); |
23900 | } |
23901 | |
23902 | #if VMS_DEBUGGING_INFO |
23903 | /* HP OpenVMS Industry Standard 64: DWARF Extensions |
23904 | Section 2.3 Prologue and Epilogue Attributes: |
23905 | When a breakpoint is set on entry to a function, it is generally |
23906 | desirable for execution to be suspended, not on the very first |
23907 | instruction of the function, but rather at a point after the |
23908 | function's frame has been set up, after any language defined local |
23909 | declaration processing has been completed, and before execution of |
23910 | the first statement of the function begins. Debuggers generally |
23911 | cannot properly determine where this point is. Similarly for a |
23912 | breakpoint set on exit from a function. The prologue and epilogue |
23913 | attributes allow a compiler to communicate the location(s) to use. */ |
23914 | |
23915 | { |
23916 | if (fde->dw_fde_vms_end_prologue) |
23917 | add_AT_vms_delta (subr_die, DW_AT_HP_prologue, |
23918 | fde->dw_fde_begin, fde->dw_fde_vms_end_prologue); |
23919 | |
23920 | if (fde->dw_fde_vms_begin_epilogue) |
23921 | add_AT_vms_delta (subr_die, DW_AT_HP_epilogue, |
23922 | fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue); |
23923 | } |
23924 | #endif |
23925 | |
23926 | } |
23927 | else |
23928 | { |
23929 | /* Generate pubnames entries for the split function code ranges. */ |
23930 | dw_fde_ref fde = fun->fde; |
23931 | |
23932 | if (fde->dw_fde_second_begin) |
23933 | { |
23934 | if (dwarf_version >= 3 || !dwarf_strict) |
23935 | { |
23936 | /* We should use ranges for non-contiguous code section |
23937 | addresses. Use the actual code range for the initial |
23938 | section, since the HOT/COLD labels might precede an |
23939 | alignment offset. */ |
23940 | bool range_list_added = false; |
23941 | add_ranges_by_labels (die: subr_die, begin: fde->dw_fde_begin, |
23942 | end: fde->dw_fde_end, added: &range_list_added, |
23943 | force_direct: false); |
23944 | add_ranges_by_labels (die: subr_die, begin: fde->dw_fde_second_begin, |
23945 | end: fde->dw_fde_second_end, |
23946 | added: &range_list_added, force_direct: false); |
23947 | if (range_list_added) |
23948 | add_ranges (NULL); |
23949 | } |
23950 | else |
23951 | { |
23952 | /* There is no real support in DW2 for this .. so we make |
23953 | a work-around. First, emit the pub name for the segment |
23954 | containing the function label. Then make and emit a |
23955 | simplified subprogram DIE for the second segment with the |
23956 | name pre-fixed by __hot/cold_sect_of_. We use the same |
23957 | linkage name for the second die so that gdb will find both |
23958 | sections when given "b foo". */ |
23959 | const char *name = NULL; |
23960 | tree decl_name = DECL_NAME (decl); |
23961 | dw_die_ref seg_die; |
23962 | |
23963 | /* Do the 'primary' section. */ |
23964 | add_AT_low_high_pc (die: subr_die, lbl_low: fde->dw_fde_begin, |
23965 | lbl_high: fde->dw_fde_end, force_direct: false); |
23966 | |
23967 | /* Build a minimal DIE for the secondary section. */ |
23968 | seg_die = new_die (tag_value: DW_TAG_subprogram, |
23969 | parent_die: subr_die->die_parent, t: decl); |
23970 | |
23971 | if (TREE_PUBLIC (decl)) |
23972 | add_AT_flag (die: seg_die, attr_kind: DW_AT_external, flag: 1); |
23973 | |
23974 | if (decl_name != NULL |
23975 | && IDENTIFIER_POINTER (decl_name) != NULL) |
23976 | { |
23977 | name = dwarf2_name (decl, scope: 1); |
23978 | if (! DECL_ARTIFICIAL (decl)) |
23979 | add_src_coords_attributes (die: seg_die, decl); |
23980 | |
23981 | add_linkage_name (die: seg_die, decl); |
23982 | } |
23983 | gcc_assert (name != NULL); |
23984 | add_pure_or_virtual_attribute (die: seg_die, func_decl: decl); |
23985 | if (DECL_ARTIFICIAL (decl)) |
23986 | add_AT_flag (die: seg_die, attr_kind: DW_AT_artificial, flag: 1); |
23987 | |
23988 | name = concat ("__second_sect_of_" , name, NULL); |
23989 | add_AT_low_high_pc (die: seg_die, lbl_low: fde->dw_fde_second_begin, |
23990 | lbl_high: fde->dw_fde_second_end, force_direct: false); |
23991 | add_name_attribute (die: seg_die, name_string: name); |
23992 | if (want_pubnames ()) |
23993 | add_pubname_string (str: name, die: seg_die); |
23994 | } |
23995 | } |
23996 | else |
23997 | add_AT_low_high_pc (die: subr_die, lbl_low: fde->dw_fde_begin, lbl_high: fde->dw_fde_end, |
23998 | force_direct: false); |
23999 | } |
24000 | |
24001 | cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl); |
24002 | |
24003 | /* We define the "frame base" as the function's CFA. This is more |
24004 | convenient for several reasons: (1) It's stable across the prologue |
24005 | and epilogue, which makes it better than just a frame pointer, |
24006 | (2) With dwarf3, there exists a one-byte encoding that allows us |
24007 | to reference the .debug_frame data by proxy, but failing that, |
24008 | (3) We can at least reuse the code inspection and interpretation |
24009 | code that determines the CFA position at various points in the |
24010 | function. */ |
24011 | if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2) |
24012 | { |
24013 | dw_loc_descr_ref op = new_loc_descr (op: DW_OP_call_frame_cfa, oprnd1: 0, oprnd2: 0); |
24014 | add_AT_loc (die: subr_die, attr_kind: DW_AT_frame_base, loc: op); |
24015 | } |
24016 | else |
24017 | { |
24018 | dw_loc_list_ref list = convert_cfa_to_fb_loc_list (offset: cfa_fb_offset); |
24019 | if (list->dw_loc_next) |
24020 | add_AT_loc_list (die: subr_die, attr_kind: DW_AT_frame_base, loc_list: list); |
24021 | else |
24022 | add_AT_loc (die: subr_die, attr_kind: DW_AT_frame_base, loc: list->expr); |
24023 | } |
24024 | |
24025 | /* Compute a displacement from the "steady-state frame pointer" to |
24026 | the CFA. The former is what all stack slots and argument slots |
24027 | will reference in the rtl; the latter is what we've told the |
24028 | debugger about. We'll need to adjust all frame_base references |
24029 | by this displacement. */ |
24030 | compute_frame_pointer_to_fb_displacement (offset: cfa_fb_offset); |
24031 | |
24032 | if (fun->static_chain_decl) |
24033 | { |
24034 | /* DWARF requires here a location expression that computes the |
24035 | address of the enclosing subprogram's frame base. The machinery |
24036 | in tree-nested.cc is supposed to store this specific address in the |
24037 | last field of the FRAME record. */ |
24038 | const tree frame_type |
24039 | = TREE_TYPE (TREE_TYPE (fun->static_chain_decl)); |
24040 | const tree fb_decl = tree_last (TYPE_FIELDS (frame_type)); |
24041 | |
24042 | tree fb_expr |
24043 | = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl); |
24044 | fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl), |
24045 | fb_expr, fb_decl, NULL_TREE); |
24046 | |
24047 | add_AT_location_description (die: subr_die, attr_kind: DW_AT_static_link, |
24048 | descr: loc_list_from_tree (loc: fb_expr, want_address: 0, NULL)); |
24049 | } |
24050 | |
24051 | resolve_variable_values (); |
24052 | } |
24053 | |
24054 | /* Generate child dies for template parameters. */ |
24055 | if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE) |
24056 | gen_generic_params_dies (t: decl); |
24057 | |
24058 | /* Now output descriptions of the arguments for this function. This gets |
24059 | (unnecessarily?) complex because of the fact that the DECL_ARGUMENT list |
24060 | for a FUNCTION_DECL doesn't indicate cases where there was a trailing |
24061 | `...' at the end of the formal parameter list. In order to find out if |
24062 | there was a trailing ellipsis or not, we must instead look at the type |
24063 | associated with the FUNCTION_DECL. This will be a node of type |
24064 | FUNCTION_TYPE. If the chain of type nodes hanging off of this |
24065 | FUNCTION_TYPE node ends with a void_type_node then there should *not* be |
24066 | an ellipsis at the end. */ |
24067 | |
24068 | /* In the case where we are describing a mere function declaration, all we |
24069 | need to do here (and all we *can* do here) is to describe the *types* of |
24070 | its formal parameters. */ |
24071 | if (debug_info_level <= DINFO_LEVEL_TERSE) |
24072 | ; |
24073 | else if (declaration) |
24074 | gen_formal_types_die (function_or_method_type: decl, context_die: subr_die); |
24075 | else |
24076 | { |
24077 | /* Generate DIEs to represent all known formal parameters. */ |
24078 | tree parm = DECL_ARGUMENTS (decl); |
24079 | tree generic_decl = early_dwarf |
24080 | ? lang_hooks.decls.get_generic_function_decl (decl) : NULL; |
24081 | tree generic_decl_parm = generic_decl |
24082 | ? DECL_ARGUMENTS (generic_decl) |
24083 | : NULL; |
24084 | |
24085 | /* Now we want to walk the list of parameters of the function and |
24086 | emit their relevant DIEs. |
24087 | |
24088 | We consider the case of DECL being an instance of a generic function |
24089 | as well as it being a normal function. |
24090 | |
24091 | If DECL is an instance of a generic function we walk the |
24092 | parameters of the generic function declaration _and_ the parameters of |
24093 | DECL itself. This is useful because we want to emit specific DIEs for |
24094 | function parameter packs and those are declared as part of the |
24095 | generic function declaration. In that particular case, |
24096 | the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE. |
24097 | That DIE has children DIEs representing the set of arguments |
24098 | of the pack. Note that the set of pack arguments can be empty. |
24099 | In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any |
24100 | children DIE. |
24101 | |
24102 | Otherwise, we just consider the parameters of DECL. */ |
24103 | while (generic_decl_parm || parm) |
24104 | { |
24105 | if (generic_decl_parm |
24106 | && lang_hooks.function_parameter_pack_p (generic_decl_parm)) |
24107 | gen_formal_parameter_pack_die (parm_pack: generic_decl_parm, |
24108 | pack_arg: parm, subr_die, |
24109 | next_arg: &parm); |
24110 | else if (parm) |
24111 | { |
24112 | dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die); |
24113 | |
24114 | if (early_dwarf |
24115 | && parm == DECL_ARGUMENTS (decl) |
24116 | && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE |
24117 | && parm_die |
24118 | && (dwarf_version >= 3 || !dwarf_strict)) |
24119 | add_AT_die_ref (die: subr_die, attr_kind: DW_AT_object_pointer, targ_die: parm_die); |
24120 | |
24121 | parm = DECL_CHAIN (parm); |
24122 | } |
24123 | |
24124 | if (generic_decl_parm) |
24125 | generic_decl_parm = DECL_CHAIN (generic_decl_parm); |
24126 | } |
24127 | |
24128 | /* Decide whether we need an unspecified_parameters DIE at the end. |
24129 | There are 2 more cases to do this for: 1) the ansi ... declaration - |
24130 | this is detectable when the end of the arg list is not a |
24131 | void_type_node 2) an unprototyped function declaration (not a |
24132 | definition). This just means that we have no info about the |
24133 | parameters at all. */ |
24134 | if (early_dwarf) |
24135 | { |
24136 | if (prototype_p (TREE_TYPE (decl))) |
24137 | { |
24138 | /* This is the prototyped case, check for.... */ |
24139 | if (stdarg_p (TREE_TYPE (decl))) |
24140 | gen_unspecified_parameters_die (decl_or_type: decl, context_die: subr_die); |
24141 | } |
24142 | else if (DECL_INITIAL (decl) == NULL_TREE) |
24143 | gen_unspecified_parameters_die (decl_or_type: decl, context_die: subr_die); |
24144 | } |
24145 | else if ((subr_die != old_die || old_die_had_no_children) |
24146 | && prototype_p (TREE_TYPE (decl)) |
24147 | && stdarg_p (TREE_TYPE (decl))) |
24148 | gen_unspecified_parameters_die (decl_or_type: decl, context_die: subr_die); |
24149 | } |
24150 | |
24151 | if (subr_die != old_die) |
24152 | /* Add the calling convention attribute if requested. */ |
24153 | add_calling_convention_attribute (subr_die, decl); |
24154 | |
24155 | /* Output Dwarf info for all of the stuff within the body of the function |
24156 | (if it has one - it may be just a declaration). |
24157 | |
24158 | OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent |
24159 | a function. This BLOCK actually represents the outermost binding contour |
24160 | for the function, i.e. the contour in which the function's formal |
24161 | parameters and labels get declared. Curiously, it appears that the front |
24162 | end doesn't actually put the PARM_DECL nodes for the current function onto |
24163 | the BLOCK_VARS list for this outer scope, but are strung off of the |
24164 | DECL_ARGUMENTS list for the function instead. |
24165 | |
24166 | The BLOCK_VARS list for the `outer_scope' does provide us with a list of |
24167 | the LABEL_DECL nodes for the function however, and we output DWARF info |
24168 | for those in decls_for_scope. Just within the `outer_scope' there will be |
24169 | a BLOCK node representing the function's outermost pair of curly braces, |
24170 | and any blocks used for the base and member initializers of a C++ |
24171 | constructor function. */ |
24172 | tree outer_scope = DECL_INITIAL (decl); |
24173 | if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK) |
24174 | { |
24175 | int call_site_note_count = 0; |
24176 | int tail_call_site_note_count = 0; |
24177 | |
24178 | /* Emit a DW_TAG_variable DIE for a named return value. */ |
24179 | if (DECL_NAME (DECL_RESULT (decl))) |
24180 | gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die); |
24181 | |
24182 | /* The first time through decls_for_scope we will generate the |
24183 | DIEs for the locals. The second time, we fill in the |
24184 | location info. */ |
24185 | decls_for_scope (outer_scope, subr_die); |
24186 | |
24187 | if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5)) |
24188 | { |
24189 | struct call_arg_loc_node *ca_loc; |
24190 | for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next) |
24191 | { |
24192 | dw_die_ref die = NULL; |
24193 | rtx tloc = NULL_RTX, tlocc = NULL_RTX; |
24194 | rtx arg, next_arg; |
24195 | tree arg_decl = NULL_TREE; |
24196 | |
24197 | for (arg = (ca_loc->call_arg_loc_note != NULL_RTX |
24198 | ? XEXP (ca_loc->call_arg_loc_note, 0) |
24199 | : NULL_RTX); |
24200 | arg; arg = next_arg) |
24201 | { |
24202 | dw_loc_descr_ref reg, val; |
24203 | machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1)); |
24204 | dw_die_ref cdie, tdie = NULL; |
24205 | |
24206 | next_arg = XEXP (arg, 1); |
24207 | if (REG_P (XEXP (XEXP (arg, 0), 0)) |
24208 | && next_arg |
24209 | && MEM_P (XEXP (XEXP (next_arg, 0), 0)) |
24210 | && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)) |
24211 | && REGNO (XEXP (XEXP (arg, 0), 0)) |
24212 | == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))) |
24213 | next_arg = XEXP (next_arg, 1); |
24214 | if (mode == VOIDmode) |
24215 | { |
24216 | mode = GET_MODE (XEXP (XEXP (arg, 0), 0)); |
24217 | if (mode == VOIDmode) |
24218 | mode = GET_MODE (XEXP (arg, 0)); |
24219 | } |
24220 | if (mode == VOIDmode || mode == BLKmode) |
24221 | continue; |
24222 | /* Get dynamic information about call target only if we |
24223 | have no static information: we cannot generate both |
24224 | DW_AT_call_origin and DW_AT_call_target |
24225 | attributes. */ |
24226 | if (ca_loc->symbol_ref == NULL_RTX) |
24227 | { |
24228 | if (XEXP (XEXP (arg, 0), 0) == pc_rtx) |
24229 | { |
24230 | tloc = XEXP (XEXP (arg, 0), 1); |
24231 | continue; |
24232 | } |
24233 | else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER |
24234 | && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx) |
24235 | { |
24236 | tlocc = XEXP (XEXP (arg, 0), 1); |
24237 | continue; |
24238 | } |
24239 | } |
24240 | reg = NULL; |
24241 | if (REG_P (XEXP (XEXP (arg, 0), 0))) |
24242 | reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0), |
24243 | initialized: VAR_INIT_STATUS_INITIALIZED); |
24244 | else if (MEM_P (XEXP (XEXP (arg, 0), 0))) |
24245 | { |
24246 | rtx mem = XEXP (XEXP (arg, 0), 0); |
24247 | reg = mem_loc_descriptor (XEXP (mem, 0), |
24248 | mode: get_address_mode (mem), |
24249 | GET_MODE (mem), |
24250 | initialized: VAR_INIT_STATUS_INITIALIZED); |
24251 | } |
24252 | else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) |
24253 | == DEBUG_PARAMETER_REF) |
24254 | { |
24255 | tree tdecl |
24256 | = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0)); |
24257 | tdie = lookup_decl_die (decl: tdecl); |
24258 | if (tdie == NULL) |
24259 | continue; |
24260 | arg_decl = tdecl; |
24261 | } |
24262 | else |
24263 | continue; |
24264 | if (reg == NULL |
24265 | && GET_CODE (XEXP (XEXP (arg, 0), 0)) |
24266 | != DEBUG_PARAMETER_REF) |
24267 | continue; |
24268 | val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode, |
24269 | VOIDmode, |
24270 | initialized: VAR_INIT_STATUS_INITIALIZED); |
24271 | if (val == NULL) |
24272 | continue; |
24273 | if (die == NULL) |
24274 | die = gen_call_site_die (decl, subr_die, ca_loc); |
24275 | cdie = new_die (tag_value: dwarf_TAG (tag: DW_TAG_call_site_parameter), parent_die: die, |
24276 | NULL_TREE); |
24277 | add_desc_attribute (die: cdie, decl: arg_decl); |
24278 | if (reg != NULL) |
24279 | add_AT_loc (die: cdie, attr_kind: DW_AT_location, loc: reg); |
24280 | else if (tdie != NULL) |
24281 | add_AT_die_ref (die: cdie, attr_kind: dwarf_AT (at: DW_AT_call_parameter), |
24282 | targ_die: tdie); |
24283 | add_AT_loc (die: cdie, attr_kind: dwarf_AT (at: DW_AT_call_value), loc: val); |
24284 | if (next_arg != XEXP (arg, 1)) |
24285 | { |
24286 | mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1)); |
24287 | if (mode == VOIDmode) |
24288 | mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0)); |
24289 | val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1), |
24290 | 0), 1), |
24291 | mode, VOIDmode, |
24292 | initialized: VAR_INIT_STATUS_INITIALIZED); |
24293 | if (val != NULL) |
24294 | add_AT_loc (die: cdie, attr_kind: dwarf_AT (at: DW_AT_call_data_value), |
24295 | loc: val); |
24296 | } |
24297 | } |
24298 | if (die == NULL |
24299 | && (ca_loc->symbol_ref || tloc)) |
24300 | die = gen_call_site_die (decl, subr_die, ca_loc); |
24301 | if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX)) |
24302 | { |
24303 | dw_loc_descr_ref tval = NULL; |
24304 | |
24305 | if (tloc != NULL_RTX) |
24306 | tval = mem_loc_descriptor (rtl: tloc, |
24307 | GET_MODE (tloc) == VOIDmode |
24308 | ? Pmode : GET_MODE (tloc), |
24309 | VOIDmode, |
24310 | initialized: VAR_INIT_STATUS_INITIALIZED); |
24311 | if (tval) |
24312 | add_AT_loc (die, attr_kind: dwarf_AT (at: DW_AT_call_target), loc: tval); |
24313 | else if (tlocc != NULL_RTX) |
24314 | { |
24315 | tval = mem_loc_descriptor (rtl: tlocc, |
24316 | GET_MODE (tlocc) == VOIDmode |
24317 | ? Pmode : GET_MODE (tlocc), |
24318 | VOIDmode, |
24319 | initialized: VAR_INIT_STATUS_INITIALIZED); |
24320 | if (tval) |
24321 | add_AT_loc (die, |
24322 | attr_kind: dwarf_AT (at: DW_AT_call_target_clobbered), |
24323 | loc: tval); |
24324 | } |
24325 | } |
24326 | if (die != NULL) |
24327 | { |
24328 | call_site_note_count++; |
24329 | if (ca_loc->tail_call_p) |
24330 | tail_call_site_note_count++; |
24331 | } |
24332 | } |
24333 | } |
24334 | call_arg_locations = NULL; |
24335 | call_arg_loc_last = NULL; |
24336 | if (tail_call_site_count >= 0 |
24337 | && tail_call_site_count == tail_call_site_note_count |
24338 | && (!dwarf_strict || dwarf_version >= 5)) |
24339 | { |
24340 | if (call_site_count >= 0 |
24341 | && call_site_count == call_site_note_count) |
24342 | add_AT_flag (die: subr_die, attr_kind: dwarf_AT (at: DW_AT_call_all_calls), flag: 1); |
24343 | else |
24344 | add_AT_flag (die: subr_die, attr_kind: dwarf_AT (at: DW_AT_call_all_tail_calls), flag: 1); |
24345 | } |
24346 | call_site_count = -1; |
24347 | tail_call_site_count = -1; |
24348 | } |
24349 | |
24350 | /* Mark used types after we have created DIEs for the functions scopes. */ |
24351 | premark_used_types (DECL_STRUCT_FUNCTION (decl)); |
24352 | } |
24353 | |
24354 | /* Returns a hash value for X (which really is a die_struct). */ |
24355 | |
24356 | hashval_t |
24357 | block_die_hasher::hash (die_struct *d) |
24358 | { |
24359 | return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent); |
24360 | } |
24361 | |
24362 | /* Return true if decl_id and die_parent of die_struct X is the same |
24363 | as decl_id and die_parent of die_struct Y. */ |
24364 | |
24365 | bool |
24366 | block_die_hasher::equal (die_struct *x, die_struct *y) |
24367 | { |
24368 | return x->decl_id == y->decl_id && x->die_parent == y->die_parent; |
24369 | } |
24370 | |
/* Hold information about markers for inlined entry points.
   One entry is recorded per inlined-function BLOCK and later consumed
   (and cleared) by add_high_low_attributes to emit DW_AT_entry_pc.  */
struct GTY ((for_user)) inline_entry_data
{
  /* The block that's the inlined_function_outer_scope for an inlined
     function.  Used as the hash key (see inline_entry_data_hasher).  */
  tree block;

  /* The label at the inlined entry point: assembled from LABEL_PFX and
     LABEL_NUM via ASM_GENERATE_INTERNAL_LABEL.  */
  const char *label_pfx;
  unsigned int label_num;

  /* The view number to be used as the inlined entry point.  */
  var_loc_view view;
};
24385 | |
/* Hasher that keys inline_entry_data entries on their BLOCK tree node,
   so lookups can be done directly with a tree (see compare_type).  */
struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
{
  typedef tree compare_type;
  static inline hashval_t hash (const inline_entry_data *);
  static inline bool equal (const inline_entry_data *, const_tree);
};
24392 | |
24393 | /* Hash table routines for inline_entry_data. */ |
24394 | |
24395 | inline hashval_t |
24396 | inline_entry_data_hasher::hash (const inline_entry_data *data) |
24397 | { |
24398 | return htab_hash_pointer (data->block); |
24399 | } |
24400 | |
24401 | inline bool |
24402 | inline_entry_data_hasher::equal (const inline_entry_data *data, |
24403 | const_tree block) |
24404 | { |
24405 | return data->block == block; |
24406 | } |
24407 | |
24408 | /* Inlined entry points pending DIE creation in this compilation unit. */ |
24409 | |
24410 | static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table; |
24411 | |
24412 | |
24413 | /* Return TRUE if DECL, which may have been previously generated as |
24414 | OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is |
24415 | true if decl (or its origin) is either an extern declaration or a |
24416 | class/namespace scoped declaration. |
24417 | |
24418 | The declare_in_namespace support causes us to get two DIEs for one |
24419 | variable, both of which are declarations. We want to avoid |
24420 | considering one to be a specification, so we must test for |
24421 | DECLARATION and DW_AT_declaration. */ |
24422 | static inline bool |
24423 | decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration) |
24424 | { |
24425 | return (old_die && TREE_STATIC (decl) && !declaration |
24426 | && get_AT_flag (die: old_die, attr_kind: DW_AT_declaration) == 1); |
24427 | } |
24428 | |
24429 | /* Return true if DECL is a local static. */ |
24430 | |
24431 | static inline bool |
24432 | local_function_static (tree decl) |
24433 | { |
24434 | gcc_assert (VAR_P (decl)); |
24435 | return TREE_STATIC (decl) |
24436 | && DECL_CONTEXT (decl) |
24437 | && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL; |
24438 | } |
24439 | |
24440 | /* Return true iff DECL overrides (presumably completes) the type of |
24441 | OLD_DIE within CONTEXT_DIE. */ |
24442 | |
24443 | static bool |
24444 | override_type_for_decl_p (tree decl, dw_die_ref old_die, |
24445 | dw_die_ref context_die) |
24446 | { |
24447 | tree type = TREE_TYPE (decl); |
24448 | int cv_quals; |
24449 | |
24450 | if (decl_by_reference_p (decl)) |
24451 | { |
24452 | type = TREE_TYPE (type); |
24453 | cv_quals = TYPE_UNQUALIFIED; |
24454 | } |
24455 | else |
24456 | cv_quals = decl_quals (decl); |
24457 | |
24458 | dw_die_ref type_die = modified_type_die (type, |
24459 | cv_quals: cv_quals | TYPE_QUALS (type), |
24460 | reverse: false, |
24461 | context_die); |
24462 | |
24463 | dw_die_ref old_type_die = get_AT_ref (die: old_die, attr_kind: DW_AT_type); |
24464 | |
24465 | return type_die != old_type_die; |
24466 | } |
24467 | |
/* Generate a DIE to represent a declared data object.
   Either DECL or ORIGIN must be non-null.  */

static void
gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
{
  /* Offset of the symbol within a Fortran COMMON block, if any.  */
  HOST_WIDE_INT off = 0;
  tree com_decl;
  tree decl_or_origin = decl ? decl : origin;
  tree ultimate_origin;
  dw_die_ref var_die;
  dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
  /* Extern decls and class/namespace-scope decls start out as
     declarations; see also the inline static data member case below.  */
  bool declaration = (DECL_EXTERNAL (decl_or_origin)
		      || class_or_namespace_scope_p (context_die));
  bool specialization_p = false;
  bool no_linkage_name = false;

  /* While C++ inline static data members have definitions inside of the
     class, force the first DIE to be a declaration, then let gen_member_die
     reparent it to the class context and call gen_variable_die again
     to create the outside of the class DIE for the definition.  */
  if (!declaration
      && old_die == NULL
      && decl
      && DECL_CONTEXT (decl)
      && TYPE_P (DECL_CONTEXT (decl))
      && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
    {
      declaration = true;
      /* Pre-DWARF5 consumers would mistake the linkage name on the
	 in-class declaration for a definition; suppress it there.  */
      if (dwarf_version < 5)
	no_linkage_name = true;
    }

  /* Follow the abstract-origin chain to its end, if there is one.  */
  ultimate_origin = decl_ultimate_origin (decl: decl_or_origin);
  if (decl || ultimate_origin)
    origin = ultimate_origin;
  /* See whether DECL_OR_ORIGIN lives in a Fortran COMMON block; if so,
     OFF receives its offset within that block.  */
  com_decl = fortran_common (decl: decl_or_origin, value: &off);

  /* Symbol in common gets emitted as a child of the common block, in the form
     of a data member.  */
  if (com_decl)
    {
      dw_die_ref com_die;
      dw_loc_list_ref loc = NULL;
      die_node com_die_arg;

      var_die = lookup_decl_die (decl: decl_or_origin);
      if (var_die)
	{
	  /* The variable DIE already exists; late in compilation we may
	     only need to fill in its location.  */
	  if (! early_dwarf && get_AT (die: var_die, attr_kind: DW_AT_location) == NULL)
	    {
	      loc = loc_list_from_tree (loc: com_decl, want_address: off ? 1 : 2, NULL);
	      if (loc)
		{
		  if (off)
		    {
		      /* Optimize the common case.  */
		      if (single_element_loc_list_p (list: loc)
			  && loc->expr->dw_loc_opc == DW_OP_addr
			  && loc->expr->dw_loc_next == NULL
			  && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
			     == SYMBOL_REF)
			{
			  rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
			  loc->expr->dw_loc_oprnd1.v.val_addr
			    = plus_constant (GET_MODE (x), x , off);
			}
		      else
			loc_list_plus_const (list_head: loc, offset: off);
		    }
		  add_AT_location_description (die: var_die, attr_kind: DW_AT_location, descr: loc);
		  remove_AT (die: var_die, attr_kind: DW_AT_declaration);
		}
	    }
	  return;
	}

      if (common_block_die_table == NULL)
	common_block_die_table = hash_table<block_die_hasher>::create_ggc (n: 10);

      /* Look up (or create) the DW_TAG_common_block DIE keyed on the
	 common decl's UID and the parent DIE.  */
      com_die_arg.decl_id = DECL_UID (com_decl);
      com_die_arg.die_parent = context_die;
      com_die = common_block_die_table->find (value: &com_die_arg);
      if (! early_dwarf)
	loc = loc_list_from_tree (loc: com_decl, want_address: 2, NULL);
      if (com_die == NULL)
	{
	  const char *cnam
	    = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
	  die_node **slot;

	  com_die = new_die (tag_value: DW_TAG_common_block, parent_die: context_die, t: decl);
	  add_name_and_src_coords_attributes (die: com_die, decl: com_decl);
	  if (loc)
	    {
	      add_AT_location_description (die: com_die, attr_kind: DW_AT_location, descr: loc);
	      /* Avoid sharing the same loc descriptor between
		 DW_TAG_common_block and DW_TAG_variable.  */
	      loc = loc_list_from_tree (loc: com_decl, want_address: 2, NULL);
	    }
	  else if (DECL_EXTERNAL (decl_or_origin))
	    add_AT_flag (die: com_die, attr_kind: DW_AT_declaration, flag: 1);
	  if (want_pubnames ())
	    add_pubname_string (str: cnam, die: com_die); /* ??? needed? */
	  com_die->decl_id = DECL_UID (com_decl);
	  slot = common_block_die_table->find_slot (value: com_die, insert: INSERT);
	  *slot = com_die;
	}
      else if (get_AT (die: com_die, attr_kind: DW_AT_location) == NULL && loc)
	{
	  add_AT_location_description (die: com_die, attr_kind: DW_AT_location, descr: loc);
	  /* Get a fresh list so the variable below doesn't share it.  */
	  loc = loc_list_from_tree (loc: com_decl, want_address: 2, NULL);
	  remove_AT (die: com_die, attr_kind: DW_AT_declaration);
	}
      /* Emit the member variable itself as a child of the common block.  */
      var_die = new_die (tag_value: DW_TAG_variable, parent_die: com_die, t: decl);
      add_name_and_src_coords_attributes (die: var_die, decl: decl_or_origin);
      add_type_attribute (object_die: var_die, TREE_TYPE (decl_or_origin),
			  cv_quals: decl_quals (decl: decl_or_origin), reverse: false,
			  context_die);
      add_alignment_attribute (die: var_die, tree_node: decl);
      add_AT_flag (die: var_die, attr_kind: DW_AT_external, flag: 1);
      if (loc)
	{
	  if (off)
	    {
	      /* Optimize the common case.  */
	      if (single_element_loc_list_p (list: loc)
		  && loc->expr->dw_loc_opc == DW_OP_addr
		  && loc->expr->dw_loc_next == NULL
		  && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
		{
		  rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
		  loc->expr->dw_loc_oprnd1.v.val_addr
		    = plus_constant (GET_MODE (x), x, off);
		}
	      else
		loc_list_plus_const (list_head: loc, offset: off);
	    }
	  add_AT_location_description (die: var_die, attr_kind: DW_AT_location, descr: loc);
	}
      else if (DECL_EXTERNAL (decl_or_origin))
	add_AT_flag (die: var_die, attr_kind: DW_AT_declaration, flag: 1);
      if (decl)
	equate_decl_number_to_die (decl, decl_die: var_die);
      return;
    }

  if (old_die)
    {
      if (declaration)
	{
	  /* A declaration that has been previously dumped, needs no
	     further annotations, since it doesn't need location on
	     the second pass.  */
	  return;
	}
      else if (decl_will_get_specification_p (old_die, decl, declaration)
	       && !get_AT (die: old_die, attr_kind: DW_AT_specification))
	{
	  /* Fall-thru so we can make a new variable die along with a
	     DW_AT_specification.  */
	}
      else if (origin && old_die->die_parent != context_die)
	{
	  /* If we will be creating an inlined instance, we need a
	     new DIE that will get annotated with
	     DW_AT_abstract_origin.  */
	  gcc_assert (!DECL_ABSTRACT_P (decl));
	}
      else
	{
	  /* If a DIE was dumped early, it still needs location info.
	     Skip to where we fill the location bits.  */
	  var_die = old_die;

	  /* ??? In LTRANS we cannot annotate early created variably
	     modified type DIEs without copying them and adjusting all
	     references to them.  Thus we dumped them again.  Also add a
	     reference to them but beware of -g0 compile and -g link
	     in which case the reference will be already present.  */
	  tree type = TREE_TYPE (decl_or_origin);
	  if (in_lto_p
	      && ! get_AT (die: var_die, attr_kind: DW_AT_type)
	      && variably_modified_type_p
		   (type, decl_function_context (decl_or_origin)))
	    {
	      if (decl_by_reference_p (decl: decl_or_origin))
		add_type_attribute (object_die: var_die, TREE_TYPE (type),
				    cv_quals: TYPE_UNQUALIFIED, reverse: false, context_die);
	      else
		add_type_attribute (object_die: var_die, type, cv_quals: decl_quals (decl: decl_or_origin),
				    reverse: false, context_die);
	    }

	  goto gen_variable_die_location;
	}
    }

  /* For static data members, the declaration in the class is supposed
     to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
     also in DWARF2; the specification should still be DW_TAG_variable
     referencing the DW_TAG_member DIE.  */
  if (declaration && class_scope_p (context_die) && dwarf_version < 5)
    var_die = new_die (tag_value: DW_TAG_member, parent_die: context_die, t: decl);
  else
    var_die = new_die (tag_value: DW_TAG_variable, parent_die: context_die, t: decl);

  if (origin != NULL)
    add_abstract_origin_attribute (die: var_die, origin);

  /* Loop unrolling can create multiple blocks that refer to the same
     static variable, so we must test for the DW_AT_declaration flag.

     ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
     copy decls and set the DECL_ABSTRACT_P flag on them instead of
     sharing them.

     ??? Duplicated blocks have been rewritten to use .debug_ranges.  */
  else if (decl_will_get_specification_p (old_die, decl, declaration))
    {
      /* This is a definition of a C++ class level static.  */
      add_AT_specification (die: var_die, targ_die: old_die);
      specialization_p = true;
      if (DECL_NAME (decl))
	{
	  expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
	  struct dwarf_file_data * file_index = lookup_filename (s.file);

	  /* Only emit source coordinates that differ from the old DIE,
	     to avoid redundant attributes on the specification.  */
	  if (get_AT_file (die: old_die, attr_kind: DW_AT_decl_file) != file_index)
	    add_AT_file (die: var_die, attr_kind: DW_AT_decl_file, fd: file_index);

	  if (get_AT_unsigned (die: old_die, attr_kind: DW_AT_decl_line) != (unsigned) s.line)
	    add_AT_unsigned (die: var_die, attr_kind: DW_AT_decl_line, unsigned_val: s.line);

	  if (debug_column_info
	      && s.column
	      && (get_AT_unsigned (die: old_die, attr_kind: DW_AT_decl_column)
		  != (unsigned) s.column))
	    add_AT_unsigned (die: var_die, attr_kind: DW_AT_decl_column, unsigned_val: s.column);

	  if (old_die->die_tag == DW_TAG_member)
	    add_linkage_name (die: var_die, decl);
	}
    }
  else
    add_name_and_src_coords_attributes (die: var_die, decl, no_linkage_name);

  if ((origin == NULL && !specialization_p)
      || (origin != NULL
	  && !DECL_ABSTRACT_P (decl_or_origin)
	  && variably_modified_type_p (TREE_TYPE (decl_or_origin),
				       decl_function_context
							(decl_or_origin)))
      || (old_die && specialization_p
	  && override_type_for_decl_p (decl: decl_or_origin, old_die, context_die)))
    {
      tree type = TREE_TYPE (decl_or_origin);

      if (decl_by_reference_p (decl: decl_or_origin))
	add_type_attribute (object_die: var_die, TREE_TYPE (type), cv_quals: TYPE_UNQUALIFIED, reverse: false,
			    context_die);
      else
	add_type_attribute (object_die: var_die, type, cv_quals: decl_quals (decl: decl_or_origin), reverse: false,
			    context_die);
    }

  if (origin == NULL && !specialization_p)
    {
      if (TREE_PUBLIC (decl))
	add_AT_flag (die: var_die, attr_kind: DW_AT_external, flag: 1);

      if (DECL_ARTIFICIAL (decl))
	add_AT_flag (die: var_die, attr_kind: DW_AT_artificial, flag: 1);

      add_alignment_attribute (die: var_die, tree_node: decl);

      add_accessibility_attribute (die: var_die, decl);
    }

  if (declaration)
    add_AT_flag (die: var_die, attr_kind: DW_AT_declaration, flag: 1);

  if (decl && (DECL_ABSTRACT_P (decl)
	       || !old_die || is_declaration_die (die: old_die)))
    equate_decl_number_to_die (decl, decl_die: var_die);

 gen_variable_die_location:
  /* Add location information (or a constant value) unless this is an
     abstract instance or a mere declaration.  */
  if (! declaration
      && (! DECL_ABSTRACT_P (decl_or_origin)
	  /* Local static vars are shared between all clones/inlines,
	     so emit DW_AT_location on the abstract DIE if DECL_RTL is
	     already set.  */
	  || (VAR_P (decl_or_origin)
	      && TREE_STATIC (decl_or_origin)
	      && DECL_RTL_SET_P (decl_or_origin))))
    {
      if (early_dwarf)
	{
	  add_pubname (decl: decl_or_origin, die: var_die);
	  /* For global register variables, emit DW_AT_location if possible
	     already during early_dwarf, as late_global_decl won't be usually
	     called.  */
	  if (DECL_HARD_REGISTER (decl_or_origin)
	      && TREE_STATIC (decl_or_origin)
	      && !decl_by_reference_p (decl: decl_or_origin)
	      && !get_AT (die: var_die, attr_kind: DW_AT_location)
	      && !get_AT (die: var_die, attr_kind: DW_AT_const_value)
	      && DECL_RTL_SET_P (decl_or_origin)
	      && REG_P (DECL_RTL (decl_or_origin)))
	    {
	      dw_loc_descr_ref descr
		= reg_loc_descriptor (DECL_RTL (decl_or_origin),
				      initialized: VAR_INIT_STATUS_INITIALIZED);
	      if (descr)
		add_AT_loc (die: var_die, attr_kind: DW_AT_location, loc: descr);
	    }
	}
      else
	add_location_or_const_value_attribute (die: var_die, decl: decl_or_origin,
					       cache_p: decl == NULL);
    }
  else
    tree_add_const_value_attribute_for_decl (var_die, decl: decl_or_origin);

  if ((dwarf_version >= 4 || !dwarf_strict)
      && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
						DW_AT_const_expr) == 1
      && !get_AT (die: var_die, attr_kind: DW_AT_const_expr)
      && !specialization_p)
    add_AT_flag (die: var_die, attr_kind: DW_AT_const_expr, flag: 1);

  if (!dwarf_strict)
    {
      int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
						       DW_AT_inline);
      if (inl != -1
	  && !get_AT (die: var_die, attr_kind: DW_AT_inline)
	  && !specialization_p)
	add_AT_unsigned (die: var_die, attr_kind: DW_AT_inline, unsigned_val: inl);
    }
}
24809 | |
24810 | /* Generate a DIE to represent a named constant. */ |
24811 | |
24812 | static void |
24813 | gen_const_die (tree decl, dw_die_ref context_die) |
24814 | { |
24815 | dw_die_ref const_die; |
24816 | tree type = TREE_TYPE (decl); |
24817 | |
24818 | const_die = lookup_decl_die (decl); |
24819 | if (const_die) |
24820 | return; |
24821 | |
24822 | const_die = new_die (tag_value: DW_TAG_constant, parent_die: context_die, t: decl); |
24823 | equate_decl_number_to_die (decl, decl_die: const_die); |
24824 | add_name_and_src_coords_attributes (die: const_die, decl); |
24825 | add_type_attribute (object_die: const_die, type, cv_quals: TYPE_QUAL_CONST, reverse: false, context_die); |
24826 | if (TREE_PUBLIC (decl)) |
24827 | add_AT_flag (die: const_die, attr_kind: DW_AT_external, flag: 1); |
24828 | if (DECL_ARTIFICIAL (decl)) |
24829 | add_AT_flag (die: const_die, attr_kind: DW_AT_artificial, flag: 1); |
24830 | tree_add_const_value_attribute_for_decl (var_die: const_die, decl); |
24831 | } |
24832 | |
/* Generate a DIE to represent a label identifier.  */

static void
gen_label_die (tree decl, dw_die_ref context_die)
{
  tree origin = decl_ultimate_origin (decl);
  dw_die_ref lbl_die = lookup_decl_die (decl);
  rtx insn;
  char label[MAX_ARTIFICIAL_LABEL_BYTES];

  if (!lbl_die)
    {
      /* First time we see this label: create the DIE and either link it
	 to its abstract origin or give it name/source coordinates.  */
      lbl_die = new_die (tag_value: DW_TAG_label, parent_die: context_die, t: decl);
      equate_decl_number_to_die (decl, decl_die: lbl_die);

      if (origin != NULL)
	add_abstract_origin_attribute (die: lbl_die, origin);
      else
	add_name_and_src_coords_attributes (die: lbl_die, decl);
    }

  if (DECL_ABSTRACT_P (decl))
    equate_decl_number_to_die (decl, decl_die: lbl_die);
  else if (! early_dwarf)
    {
      insn = DECL_RTL_IF_SET (decl);

      /* Deleted labels are programmer specified labels which have been
	 eliminated because of various optimizations.  We still emit them
	 here so that it is possible to put breakpoints on them.  */
      if (insn
	  && (LABEL_P (insn)
	      || ((NOTE_P (insn)
	           && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
	{
	  /* When optimization is enabled (via -O) some parts of the compiler
	     (e.g. jump.cc and cse.cc) may try to delete CODE_LABEL insns which
	     represent source-level labels which were explicitly declared by
	     the user.  This really shouldn't be happening though, so catch
	     it if it ever does happen.  */
	  gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());

	  /* Emit DW_AT_low_pc pointing at the label's address.  */
	  ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
	  add_AT_lbl_id (die: lbl_die, attr_kind: DW_AT_low_pc, lbl_id: label);
	}
      else if (insn
	       && NOTE_P (insn)
	       && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
	       && CODE_LABEL_NUMBER (insn) != -1)
	{
	  /* A debug-only label that survived as a note; it still has an
	     address we can reference.  */
	  ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
	  add_AT_lbl_id (die: lbl_die, attr_kind: DW_AT_low_pc, lbl_id: label);
	}
    }
}
24888 | |
24889 | /* A helper function for gen_inlined_subroutine_die. Add source coordinate |
24890 | attributes to the DIE for a block STMT, to describe where the inlined |
24891 | function was called from. This is similar to add_src_coords_attributes. */ |
24892 | |
24893 | static inline void |
24894 | add_call_src_coords_attributes (tree stmt, dw_die_ref die) |
24895 | { |
24896 | /* We can end up with BUILTINS_LOCATION here. */ |
24897 | if (RESERVED_LOCATION_P (BLOCK_SOURCE_LOCATION (stmt))) |
24898 | return; |
24899 | |
24900 | location_t locus = BLOCK_SOURCE_LOCATION (stmt); |
24901 | expanded_location s = expand_location (locus); |
24902 | |
24903 | if (dwarf_version >= 3 || !dwarf_strict) |
24904 | { |
24905 | add_AT_file (die, attr_kind: DW_AT_call_file, fd: lookup_filename (s.file)); |
24906 | add_AT_unsigned (die, attr_kind: DW_AT_call_line, unsigned_val: s.line); |
24907 | if (debug_column_info && s.column) |
24908 | add_AT_unsigned (die, attr_kind: DW_AT_call_column, unsigned_val: s.column); |
24909 | unsigned discr = get_discriminator_from_loc (locus); |
24910 | if (discr != 0) |
24911 | add_AT_unsigned (die, attr_kind: DW_AT_GNU_discriminator, unsigned_val: discr); |
24912 | } |
24913 | } |
24914 | |
24915 | |
/* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
   Add low_pc and high_pc attributes to the DIE for a block STMT.  */

static inline void
add_high_low_attributes (tree stmt, dw_die_ref die)
{
  char label[MAX_ARTIFICIAL_LABEL_BYTES];

  /* If an entry-point marker was recorded for this block (it is the
     outermost scope of an inlined function), emit DW_AT_entry_pc (and
     possibly the entry view) and drop the table entry.  */
  if (inline_entry_data **iedp
      = !inline_entry_data_table ? NULL
	: inline_entry_data_table->find_slot_with_hash (comparable: stmt,
						        hash: htab_hash_pointer (stmt),
						        insert: NO_INSERT))
    {
      inline_entry_data *ied = *iedp;
      gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
      gcc_assert (debug_inline_points);
      gcc_assert (inlined_function_outer_scope_p (stmt));

      ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
      add_AT_lbl_id (die, attr_kind: DW_AT_entry_pc, lbl_id: label);

      if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
	  && !dwarf_strict)
	{
	  if (!output_asm_line_debug_info ())
	    add_AT_unsigned (die, attr_kind: DW_AT_GNU_entry_view, unsigned_val: ied->view);
	  else
	    {
	      ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
	      /* FIXME: this will resolve to a small number.  Could we
		 possibly emit smaller data?  Ideally we'd emit a
		 uleb128, but that would make the size of DIEs
		 impossible for the compiler to compute, since it's
		 the assembler that computes the value of the view
		 label in this case.  Ideally, we'd have a single form
		 encompassing both the address and the view, and
		 indirecting them through a table might make things
		 easier, but even that would be more wasteful,
		 space-wise, than what we have now.  */
	      add_AT_symview (die, attr_kind: DW_AT_GNU_entry_view, view_label: label);
	    }
	}

      inline_entry_data_table->clear_slot (slot: iedp);
    }

  /* A block split across hot/cold sections needs a DW_AT_ranges list
     (DWARF 3+, or non-strict); otherwise a simple low/high pc pair.  */
  if (BLOCK_FRAGMENT_CHAIN (stmt)
      && (dwarf_version >= 3 || !dwarf_strict))
    {
      tree chain, superblock = NULL_TREE;
      dw_die_ref pdie;
      dw_attr_node *attr = NULL;

      if (!debug_inline_points && inlined_function_outer_scope_p (block: stmt))
	{
	  ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
				       BLOCK_NUMBER (stmt));
          add_AT_lbl_id (die, attr_kind: DW_AT_entry_pc, lbl_id: label);
	}

      /* Optimize duplicate .debug_ranges lists or even tails of
	 lists.  If this BLOCK has same ranges as its supercontext,
	 lookup DW_AT_ranges attribute in the supercontext (and
	 recursively so), verify that the ranges_table contains the
	 right values and use it instead of adding a new .debug_range.  */
      for (chain = stmt, pdie = die;
	   BLOCK_SAME_RANGE (chain);
	   chain = BLOCK_SUPERCONTEXT (chain))
	{
	  dw_attr_node *new_attr;

	  pdie = pdie->die_parent;
	  if (pdie == NULL)
	    break;
	  if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
	    break;
	  new_attr = get_AT (die: pdie, attr_kind: DW_AT_ranges);
	  if (new_attr == NULL
	      || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
	    break;
	  attr = new_attr;
	  superblock = BLOCK_SUPERCONTEXT (chain);
	}
      /* If a shareable ranges list was found, cross-check it against the
	 superblock's fragment chain and reuse its tail.  */
      if (attr != NULL
	  && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
	      == (int)BLOCK_NUMBER (superblock))
	  && BLOCK_FRAGMENT_CHAIN (superblock))
	{
	  unsigned long off = attr->dw_attr_val.v.val_offset;
	  unsigned long supercnt = 0, thiscnt = 0;
	  for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
	       chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
	    {
	      ++supercnt;
	      gcc_checking_assert ((*ranges_table)[off + supercnt].num
				   == (int)BLOCK_NUMBER (chain));
	    }
	  gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
	  for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
	       chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
	    ++thiscnt;
	  gcc_assert (supercnt >= thiscnt);
	  /* This block's ranges are a tail of the superblock's list.  */
	  add_AT_range_list (die, attr_kind: DW_AT_ranges, offset: off + supercnt - thiscnt,
			     force_direct: false);
	  note_rnglist_head (offset: off + supercnt - thiscnt);
	  return;
	}

      /* No shareable list: build a fresh range list from the block and
	 its fragments, starting a new sub-list at each section change.  */
      unsigned int offset = add_ranges (block: stmt, maybe_new_sec: true);
      add_AT_range_list (die, attr_kind: DW_AT_ranges, offset, force_direct: false);
      note_rnglist_head (offset);

      bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
      chain = BLOCK_FRAGMENT_CHAIN (stmt);
      do
	{
	  add_ranges (block: chain, maybe_new_sec: prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
	  prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
	  chain = BLOCK_FRAGMENT_CHAIN (chain);
	}
      while (chain);
      add_ranges (NULL);
    }
  else
    {
      /* Contiguous block: a simple DW_AT_low_pc/DW_AT_high_pc pair
	 delimited by the block's begin/end labels.  */
      char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
      ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
				   BLOCK_NUMBER (stmt));
      ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
				   BLOCK_NUMBER (stmt));
      add_AT_low_high_pc (die, lbl_low: label, lbl_high: label_high, force_direct: false);
    }
}
25050 | |
/* Generate a DIE for a lexical block.  */

static void
gen_lexical_block_die (tree stmt, dw_die_ref context_die)
{
  dw_die_ref old_die = lookup_block_die (block: stmt);
  dw_die_ref stmt_die = NULL;
  if (!old_die)
    {
      stmt_die = new_die (tag_value: DW_TAG_lexical_block, parent_die: context_die, t: stmt);
      equate_block_to_die (block: stmt, die: stmt_die);
    }

  if (BLOCK_ABSTRACT_ORIGIN (stmt))
    {
      /* If this is an inlined or concrete instance, create a new lexical
	 DIE for anything below to attach DW_AT_abstract_origin to.  */
      if (old_die)
	stmt_die = new_die (tag_value: DW_TAG_lexical_block, parent_die: context_die, t: stmt);

      tree origin = block_ultimate_origin (stmt);
      if (origin != NULL_TREE && (origin != stmt || old_die))
	add_abstract_origin_attribute (die: stmt_die, origin);

      /* The early DIE, if any, is the abstract copy; don't reuse it.  */
      old_die = NULL;
    }

  if (old_die)
    stmt_die = old_die;

  /* A non abstract block whose blocks have already been reordered
     should have the instruction range for this block.  If so, set the
     high/low attributes.  */
  if (!early_dwarf && TREE_ASM_WRITTEN (stmt))
    {
      gcc_assert (stmt_die);
      add_high_low_attributes (stmt, die: stmt_die);
    }

  /* Emit DIEs for everything declared within this block.  */
  decls_for_scope (stmt, stmt_die);
}
25092 | |
/* Generate a DIE for an inlined subprogram.  STMT is the BLOCK created
   for the inline expansion; the abstract origin of STMT identifies the
   inlined function's declaration.  */

static void
gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
{
  tree decl = block_ultimate_origin (stmt);

  /* Make sure any inlined functions are known to be inlineable.  */
  gcc_checking_assert (DECL_ABSTRACT_P (decl)
		       || cgraph_function_possibly_inlined_p (decl));

  dw_die_ref subr_die = new_die (tag_value: DW_TAG_inlined_subroutine, parent_die: context_die, t: stmt);

  /* Only record the block -> DIE mapping when something (call-site or
     inline-entry information) will need to look it up later.  */
  if (call_arg_locations || debug_inline_points)
    equate_block_to_die (block: stmt, die: subr_die);
  add_abstract_origin_attribute (die: subr_die, origin: decl);
  if (TREE_ASM_WRITTEN (stmt))
    add_high_low_attributes (stmt, die: subr_die);
  /* DW_AT_call_file/line/column for the inlining point.  */
  add_call_src_coords_attributes (stmt, die: subr_die);

  /* The inliner creates an extra BLOCK for the parameter setup,
     we want to merge that with the actual outermost BLOCK of the
     inlined function to avoid duplicate locals in consumers.
     Do that by doing the recursion to subblocks on the single subblock
     of STMT.  */
  bool unwrap_one = false;
  if (BLOCK_SUBBLOCKS (stmt) && !BLOCK_CHAIN (BLOCK_SUBBLOCKS (stmt)))
    {
      tree origin = block_ultimate_origin (BLOCK_SUBBLOCKS (stmt));
      if (origin
	  && TREE_CODE (origin) == BLOCK
	  && BLOCK_SUPERCONTEXT (origin) == decl)
	unwrap_one = true;
    }
  decls_for_scope (stmt, subr_die, !unwrap_one);
  if (unwrap_one)
    decls_for_scope (BLOCK_SUBBLOCKS (stmt), subr_die);
}
25131 | |
/* Generate a DIE for a field in a record, or structure.  CTX is required: see
   the comment for VLR_CONTEXT.  Emits a DW_TAG_member DIE under
   CONTEXT_DIE for DECL, a FIELD_DECL.  */

static void
gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
{
  dw_die_ref decl_die;

  /* Don't emit anything for erroneous fields.  */
  if (TREE_TYPE (decl) == error_mark_node)
    return;

  decl_die = new_die (tag_value: DW_TAG_member, parent_die: context_die, t: decl);
  add_name_and_src_coords_attributes (die: decl_die, decl);
  add_type_attribute (object_die: decl_die, type: member_declared_type (member: decl), cv_quals: decl_quals (decl),
		      TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
		      context_die);

  /* Bit-fields additionally carry byte size, bit size and bit offset.  */
  if (DECL_BIT_FIELD_TYPE (decl))
    {
      add_byte_size_attribute (die: decl_die, tree_node: decl);
      add_bit_size_attribute (die: decl_die, decl);
      add_bit_offset_attribute (die: decl_die, decl);
    }

  add_alignment_attribute (die: decl_die, tree_node: decl);

  /* Union members all live at offset zero, so no location is needed.  */
  if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
    add_data_member_location_attribute (die: decl_die, decl, ctx);

  if (DECL_ARTIFICIAL (decl))
    add_AT_flag (die: decl_die, attr_kind: DW_AT_artificial, flag: 1);

  add_accessibility_attribute (die: decl_die, decl);

  /* Add DW_AT_export_symbols to anonymous unions or structs.  */
  if ((dwarf_version >= 5 || !dwarf_strict) && DECL_NAME (decl) == NULL_TREE)
    if (tree type = member_declared_type (member: decl))
      if (lang_hooks.types.type_dwarf_attribute (TYPE_MAIN_VARIANT (type),
						 DW_AT_export_symbols) != -1)
	{
	  dw_die_ref type_die = lookup_type_die (TYPE_MAIN_VARIANT (type));
	  if (type_die && get_AT (die: type_die, attr_kind: DW_AT_export_symbols) == NULL)
	    add_AT_flag (die: type_die, attr_kind: DW_AT_export_symbols, flag: 1);
	}

  /* Equate decl number to die, so that we can look up this decl later on.  */
  equate_decl_number_to_die (decl, decl_die);
}
25180 | |
/* Generate a DIE for a pointer to a member type.  TYPE can be an
   OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
   pointer to member function.  */

static void
gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
{
  /* Already emitted; nothing to do.  */
  if (lookup_type_die (type))
    return;

  dw_die_ref ptr_die = new_die (tag_value: DW_TAG_ptr_to_member_type,
				parent_die: scope_die_for (t: type, context_die), t: type);

  equate_type_number_to_die (type, type_die: ptr_die);
  add_AT_die_ref (die: ptr_die, attr_kind: DW_AT_containing_type,
		  targ_die: lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
  add_type_attribute (object_die: ptr_die, TREE_TYPE (type), cv_quals: TYPE_UNQUALIFIED, reverse: false,
		      context_die);
  add_alignment_attribute (die: ptr_die, tree_node: type);

  /* For pointers to data members (not member functions), describe how to
     locate the member: the pointer value is a byte offset to be added to
     the object address, hence a simple DW_OP_plus use-location.  */
  if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
      && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
    {
      dw_loc_descr_ref op = new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0);
      add_AT_loc (die: ptr_die, attr_kind: DW_AT_use_location, loc: op);
    }
}
25208 | |
/* Cached producer string emitted as DW_AT_producer on the compile unit
   DIE; may be NULL, in which case an empty string is emitted (see
   gen_compile_unit_die).  */
static char *producer_string;
25210 | |
25211 | /* Given a C and/or C++ language/version string return the "highest". |
25212 | C++ is assumed to be "higher" than C in this case. Used for merging |
25213 | LTO translation unit languages. */ |
25214 | static const char * |
25215 | highest_c_language (const char *lang1, const char *lang2) |
25216 | { |
25217 | if (strcmp (s1: "GNU C++26" , s2: lang1) == 0 || strcmp (s1: "GNU C++26" , s2: lang2) == 0) |
25218 | return "GNU C++26" ; |
25219 | if (strcmp (s1: "GNU C++23" , s2: lang1) == 0 || strcmp (s1: "GNU C++23" , s2: lang2) == 0) |
25220 | return "GNU C++23" ; |
25221 | if (strcmp (s1: "GNU C++20" , s2: lang1) == 0 || strcmp (s1: "GNU C++20" , s2: lang2) == 0) |
25222 | return "GNU C++20" ; |
25223 | if (strcmp (s1: "GNU C++17" , s2: lang1) == 0 || strcmp (s1: "GNU C++17" , s2: lang2) == 0) |
25224 | return "GNU C++17" ; |
25225 | if (strcmp (s1: "GNU C++14" , s2: lang1) == 0 || strcmp (s1: "GNU C++14" , s2: lang2) == 0) |
25226 | return "GNU C++14" ; |
25227 | if (strcmp (s1: "GNU C++11" , s2: lang1) == 0 || strcmp (s1: "GNU C++11" , s2: lang2) == 0) |
25228 | return "GNU C++11" ; |
25229 | if (strcmp (s1: "GNU C++98" , s2: lang1) == 0 || strcmp (s1: "GNU C++98" , s2: lang2) == 0) |
25230 | return "GNU C++98" ; |
25231 | |
25232 | if (strcmp (s1: "GNU C23" , s2: lang1) == 0 || strcmp (s1: "GNU C23" , s2: lang2) == 0) |
25233 | return "GNU C23" ; |
25234 | if (strcmp (s1: "GNU C17" , s2: lang1) == 0 || strcmp (s1: "GNU C17" , s2: lang2) == 0) |
25235 | return "GNU C17" ; |
25236 | if (strcmp (s1: "GNU C11" , s2: lang1) == 0 || strcmp (s1: "GNU C11" , s2: lang2) == 0) |
25237 | return "GNU C11" ; |
25238 | if (strcmp (s1: "GNU C99" , s2: lang1) == 0 || strcmp (s1: "GNU C99" , s2: lang2) == 0) |
25239 | return "GNU C99" ; |
25240 | if (strcmp (s1: "GNU C89" , s2: lang1) == 0 || strcmp (s1: "GNU C89" , s2: lang2) == 0) |
25241 | return "GNU C89" ; |
25242 | |
25243 | gcc_unreachable (); |
25244 | } |
25245 | |
25246 | |
/* Generate the DIE for the compilation unit.  FILENAME is the primary
   source file name, or NULL; a DW_AT_comp_dir attribute is only added
   for real file names (not "<built-in>").  Returns the new
   DW_TAG_compile_unit DIE.  */

static dw_die_ref
gen_compile_unit_die (const char *filename)
{
  dw_die_ref die;
  const char *language_string = lang_hooks.name;
  int language;

  die = new_die (tag_value: DW_TAG_compile_unit, NULL, NULL);

  if (filename)
    {
      add_filename_attribute (die, name_string: filename);
      /* Don't add cwd for <built-in>.  */
      if (filename[0] != '<')
	add_comp_dir_attribute (die);
    }

  add_AT_string (die, attr_kind: DW_AT_producer, str: producer_string ? producer_string : "" );

  /* If our producer is LTO try to figure out a common language to use
     from the global list of translation units.  */
  if (strcmp (s1: language_string, s2: "GNU GIMPLE" ) == 0)
    {
      unsigned i;
      tree t;
      const char *common_lang = NULL;

      FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
	{
	  if (!TRANSLATION_UNIT_LANGUAGE (t))
	    continue;
	  if (!common_lang)
	    common_lang = TRANSLATION_UNIT_LANGUAGE (t);
	  else if (strcmp (s1: common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
	    ;
	  else if (startswith (str: common_lang, prefix: "GNU C" )
		   && startswith (TRANSLATION_UNIT_LANGUAGE (t), prefix: "GNU C" ))
	    /* Mixing C and C++ is ok, use C++ in that case.  */
	    common_lang = highest_c_language (lang1: common_lang,
					      TRANSLATION_UNIT_LANGUAGE (t));
	  else
	    {
	      /* Fall back to C.  */
	      common_lang = NULL;
	      break;
	    }
	}

      if (common_lang)
	language_string = common_lang;
    }

  /* Map the front end's language name to a DWARF language code,
     defaulting to DW_LANG_C.  Newer codes are only used when the
     requested DWARF version (or non-strict mode) permits them.  */
  language = DW_LANG_C;
  if (startswith (str: language_string, prefix: "GNU C" )
      && ISDIGIT (language_string[5]))
    {
      language = DW_LANG_C89;
      if (dwarf_version >= 3 || !dwarf_strict)
	{
	  if (strcmp (s1: language_string, s2: "GNU C89" ) != 0)
	    language = DW_LANG_C99;

	  if (dwarf_version >= 5 /* || !dwarf_strict */)
	    if (strcmp (s1: language_string, s2: "GNU C11" ) == 0
		|| strcmp (s1: language_string, s2: "GNU C17" ) == 0
		|| strcmp (s1: language_string, s2: "GNU C23" ) == 0)
	      language = DW_LANG_C11;
	}
    }
  else if (startswith (str: language_string, prefix: "GNU C++" ))
    {
      language = DW_LANG_C_plus_plus;
      if (dwarf_version >= 5 /* || !dwarf_strict */)
	{
	  if (strcmp (s1: language_string, s2: "GNU C++11" ) == 0)
	    language = DW_LANG_C_plus_plus_11;
	  else if (strcmp (s1: language_string, s2: "GNU C++14" ) == 0)
	    language = DW_LANG_C_plus_plus_14;
	  else if (strcmp (s1: language_string, s2: "GNU C++17" ) == 0
		   || strcmp (s1: language_string, s2: "GNU C++20" ) == 0
		   || strcmp (s1: language_string, s2: "GNU C++23" ) == 0
		   || strcmp (s1: language_string, s2: "GNU C++26" ) == 0)
	    /* For now.  */
	    language = DW_LANG_C_plus_plus_14;
	}
    }
  else if (strcmp (s1: language_string, s2: "GNU F77" ) == 0)
    language = DW_LANG_Fortran77;
  else if (strcmp (s1: language_string, s2: "GNU Modula-2" ) == 0)
    language = DW_LANG_Modula2;
  else if (dwarf_version >= 3 || !dwarf_strict)
    {
      if (strcmp (s1: language_string, s2: "GNU Ada" ) == 0)
	language = DW_LANG_Ada95;
      else if (startswith (str: language_string, prefix: "GNU Fortran" ))
	{
	  language = DW_LANG_Fortran95;
	  if (dwarf_version >= 5 /* || !dwarf_strict */)
	    {
	      if (strcmp (s1: language_string, s2: "GNU Fortran2003" ) == 0)
		language = DW_LANG_Fortran03;
	      else if (strcmp (s1: language_string, s2: "GNU Fortran2008" ) == 0)
		language = DW_LANG_Fortran08;
	    }
	}
      else if (strcmp (s1: language_string, s2: "GNU Objective-C" ) == 0)
	language = DW_LANG_ObjC;
      else if (strcmp (s1: language_string, s2: "GNU Objective-C++" ) == 0)
	language = DW_LANG_ObjC_plus_plus;
      else if (strcmp (s1: language_string, s2: "GNU D" ) == 0)
	language = DW_LANG_D;
      else if (dwarf_version >= 5 || !dwarf_strict)
	{
	  if (strcmp (s1: language_string, s2: "GNU Go" ) == 0)
	    language = DW_LANG_Go;
	  else if (strcmp (s1: language_string, s2: "GNU Rust" ) == 0)
	    language = DW_LANG_Rust;
	}
    }
  /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works.  */
  else if (startswith (str: language_string, prefix: "GNU Fortran" ))
    language = DW_LANG_Fortran90;
  /* Likewise for Ada.  */
  else if (strcmp (s1: language_string, s2: "GNU Ada" ) == 0)
    language = DW_LANG_Ada83;

  add_AT_unsigned (die, attr_kind: DW_AT_language, unsigned_val: language);

  switch (language)
    {
    case DW_LANG_Fortran77:
    case DW_LANG_Fortran90:
    case DW_LANG_Fortran95:
    case DW_LANG_Fortran03:
    case DW_LANG_Fortran08:
      /* Fortran has case insensitive identifiers and the front-end
	 lowercases everything.  */
      add_AT_unsigned (die, attr_kind: DW_AT_identifier_case, unsigned_val: DW_ID_down_case);
      break;
    default:
      /* The default DW_ID_case_sensitive doesn't need to be specified.  */
      break;
    }
  return die;
}
25394 | |
/* Generate the DIE for a base class.  BINFO describes the base,
   ACCESS is one of the access_*_node trees, and TYPE is the derived
   class (used as the variant-part context for the member location).  */

static void
gen_inheritance_die (tree binfo, tree access, tree type,
		     dw_die_ref context_die)
{
  dw_die_ref die = new_die (tag_value: DW_TAG_inheritance, parent_die: context_die, t: binfo);
  struct vlr_context ctx = { .struct_type: type, NULL };

  add_type_attribute (object_die: die, BINFO_TYPE (binfo), cv_quals: TYPE_UNQUALIFIED, reverse: false,
		      context_die);
  /* Where the base subobject lives within the derived object.  */
  add_data_member_location_attribute (die, decl: binfo, ctx: &ctx);

  if (BINFO_VIRTUAL_P (binfo))
    add_AT_unsigned (die, attr_kind: DW_AT_virtuality, unsigned_val: DW_VIRTUALITY_virtual);

  /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
     children, otherwise the default is DW_ACCESS_public.  In DWARF2
     the default has always been DW_ACCESS_private.  Only emit the
     attribute when it differs from the applicable default.  */
  if (access == access_public_node)
    {
      if (dwarf_version == 2
	  || context_die->die_tag == DW_TAG_class_type)
	add_AT_unsigned (die, attr_kind: DW_AT_accessibility, unsigned_val: DW_ACCESS_public);
    }
  else if (access == access_protected_node)
    add_AT_unsigned (die, attr_kind: DW_AT_accessibility, unsigned_val: DW_ACCESS_protected);
  else if (dwarf_version > 2
	   && context_die->die_tag != DW_TAG_class_type)
    add_AT_unsigned (die, attr_kind: DW_AT_accessibility, unsigned_val: DW_ACCESS_private);
}
25426 | |
25427 | /* Return whether DECL is a FIELD_DECL that represents the variant part of a |
25428 | structure. */ |
25429 | |
25430 | static bool |
25431 | is_variant_part (tree decl) |
25432 | { |
25433 | return (TREE_CODE (decl) == FIELD_DECL |
25434 | && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE); |
25435 | } |
25436 | |
25437 | /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is, |
25438 | return the FIELD_DECL. Return NULL_TREE otherwise. */ |
25439 | |
25440 | static tree |
25441 | analyze_discr_in_predicate (tree operand, tree struct_type) |
25442 | { |
25443 | while (CONVERT_EXPR_P (operand)) |
25444 | operand = TREE_OPERAND (operand, 0); |
25445 | |
25446 | /* Match field access to members of struct_type only. */ |
25447 | if (TREE_CODE (operand) == COMPONENT_REF |
25448 | && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR |
25449 | && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type |
25450 | && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL) |
25451 | return TREE_OPERAND (operand, 1); |
25452 | else |
25453 | return NULL_TREE; |
25454 | } |
25455 | |
/* Check that SRC is a constant integer that can be represented as a native
   integer constant (either signed or unsigned).  If so, store it into DEST and
   return true.  Return false otherwise.  */

static bool
get_discr_value (tree src, dw_discr_value *dest)
{
  tree discr_type = TREE_TYPE (src);

  /* Let the language replace the type with a debug-oriented one, if it
     provides such a hook.  */
  if (lang_hooks.types.get_debug_type)
    {
      tree debug_type = lang_hooks.types.get_debug_type (discr_type);
      if (debug_type != NULL)
	discr_type = debug_type;
    }

  if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
    return false;

  /* Signedness can vary between the original type and the debug type.  This
     can happen for character types in Ada for instance: the character type
     used for code generation can be signed, to be compatible with the C one,
     but from a debugger point of view, it must be unsigned.  */
  bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
  bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);

  if (is_orig_unsigned != is_debug_unsigned)
    src = fold_convert (discr_type, src);

  /* Reject values that do not fit in a host-wide integer of the debug
     type's signedness.  */
  if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
    return false;

  /* DEST->pos records whether the unsigned (uval) or signed (sval)
     member of the union is the valid one.  */
  dest->pos = is_debug_unsigned;
  if (is_debug_unsigned)
    dest->v.uval = tree_to_uhwi (src);
  else
    dest->v.sval = tree_to_shwi (src);

  return true;
}
25496 | |
/* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
   FIELD_DECL in STRUCT_TYPE that represents a variant part.  If unsuccessful,
   store NULL_TREE in DISCR_DECL.  Otherwise:

   - store the discriminant field in STRUCT_TYPE that controls the variant
     part to *DISCR_DECL

   - put in *DISCR_LISTS_P an array where for each variant, the item
     represents the corresponding matching list of discriminant values.

   - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
     the above array.

   Note that when the array is allocated (i.e. when the analysis is
   successful), it is up to the caller to free the array.  */

static void
analyze_variants_discr (tree variant_part_decl,
			tree struct_type,
			tree *discr_decl,
			dw_discr_list_ref **discr_lists_p,
			unsigned *discr_lists_length)
{
  tree variant_part_type = TREE_TYPE (variant_part_decl);
  tree variant;
  dw_discr_list_ref *discr_lists;
  unsigned i;

  /* Compute how many variants there are in this variant part.  */
  *discr_lists_length = 0;
  for (variant = TYPE_FIELDS (variant_part_type);
       variant != NULL_TREE;
       variant = DECL_CHAIN (variant))
    ++*discr_lists_length;

  /* xcalloc zero-initializes, so each per-variant list starts empty.  */
  *discr_decl = NULL_TREE;
  *discr_lists_p
    = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
				     sizeof (**discr_lists_p));
  discr_lists = *discr_lists_p;

  /* And then analyze all variants to extract discriminant information for all
     of them.  This analysis is conservative: as soon as we detect something we
     do not support, abort everything and pretend we found nothing.  */
  for (variant = TYPE_FIELDS (variant_part_type), i = 0;
       variant != NULL_TREE;
       variant = DECL_CHAIN (variant), ++i)
    {
      tree match_expr = DECL_QUALIFIER (variant);

      /* Now, try to analyze the predicate and deduce a discriminant for
	 it.  */
      if (match_expr == boolean_true_node)
	/* Typically happens for the default variant: it matches all cases that
	   previous variants rejected.  Don't output any matching value for
	   this one.  */
	continue;

      /* The following loop tries to iterate over each discriminant
	 possibility: single values or ranges.  */
      while (match_expr != NULL_TREE)
	{
	  tree next_round_match_expr;
	  tree candidate_discr = NULL_TREE;
	  dw_discr_list_ref new_node = NULL;

	  /* Possibilities are matched one after the other by nested
	     TRUTH_ORIF_EXPR expressions.  Process the current possibility and
	     continue with the rest at next iteration.  */
	  if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
	    {
	      next_round_match_expr = TREE_OPERAND (match_expr, 0);
	      match_expr = TREE_OPERAND (match_expr, 1);
	    }
	  else
	    next_round_match_expr = NULL_TREE;

	  if (match_expr == boolean_false_node)
	    /* This sub-expression matches nothing: just wait for the next
	       one.  */
	    ;

	  else if (TREE_CODE (match_expr) == EQ_EXPR)
	    {
	      /* We are matching:  <discr_field> == <integer_cst>
		 This sub-expression matches a single value.  */
	      tree integer_cst = TREE_OPERAND (match_expr, 1);

	      candidate_discr
	        = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
					      struct_type);

	      new_node = ggc_cleared_alloc<dw_discr_list_node> ();
	      if (!get_discr_value (src: integer_cst,
				    dest: &new_node->dw_discr_lower_bound))
		goto abort;
	      new_node->dw_discr_range = false;
	    }

	  else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
	    {
	      /* We are matching:
		   <discr_field> > <integer_cst>
		 && <discr_field> < <integer_cst>.
		 This sub-expression matches the range of values between the
		 two matched integer constants.  Note that comparisons can be
		 inclusive or exclusive.  */
	      tree candidate_discr_1, candidate_discr_2;
	      tree lower_cst, upper_cst;
	      bool lower_cst_included, upper_cst_included;
	      tree lower_op = TREE_OPERAND (match_expr, 0);
	      tree upper_op = TREE_OPERAND (match_expr, 1);

	      /* When the comparison is exclusive, the integer constant is not
		 the discriminant range bound we are looking for: we will have
		 to increment or decrement it.  */
	      if (TREE_CODE (lower_op) == GE_EXPR)
		lower_cst_included = true;
	      else if (TREE_CODE (lower_op) == GT_EXPR)
		lower_cst_included = false;
	      else
		goto abort;

	      if (TREE_CODE (upper_op) == LE_EXPR)
		upper_cst_included = true;
	      else if (TREE_CODE (upper_op) == LT_EXPR)
		upper_cst_included = false;
	      else
		goto abort;

	      /* Extract the discriminant from the first operand and check it
		 is consistent with the same analysis in the second
		 operand.  */
	      candidate_discr_1
	        = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
					      struct_type);
	      candidate_discr_2
	        = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
					      struct_type);
	      if (candidate_discr_1 == candidate_discr_2)
		candidate_discr = candidate_discr_1;
	      else
		goto abort;

	      /* Extract bounds from both.  */
	      new_node = ggc_cleared_alloc<dw_discr_list_node> ();
	      lower_cst = TREE_OPERAND (lower_op, 1);
	      upper_cst = TREE_OPERAND (upper_op, 1);

	      if (!lower_cst_included)
		lower_cst
		  = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
				 build_int_cst (TREE_TYPE (lower_cst), 1));
	      if (!upper_cst_included)
		upper_cst
		  = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
				 build_int_cst (TREE_TYPE (upper_cst), 1));

	      if (!get_discr_value (src: lower_cst,
				    dest: &new_node->dw_discr_lower_bound)
		  || !get_discr_value (src: upper_cst,
				       dest: &new_node->dw_discr_upper_bound))
		goto abort;

	      new_node->dw_discr_range = true;
	    }

	  else if ((candidate_discr
	              = analyze_discr_in_predicate (operand: match_expr, struct_type))
		   && (TREE_TYPE (candidate_discr) == boolean_type_node
		       || TREE_TYPE (TREE_TYPE (candidate_discr))
			  == boolean_type_node))
	    {
	      /* We are matching:  <discr_field> for a boolean discriminant.
		 This sub-expression matches boolean_true_node.  */
	      new_node = ggc_cleared_alloc<dw_discr_list_node> ();
	      if (!get_discr_value (boolean_true_node,
				    dest: &new_node->dw_discr_lower_bound))
		goto abort;
	      new_node->dw_discr_range = false;
	    }

	  else
	    /* Unsupported sub-expression: we cannot determine the set of
	       matching discriminant values.  Abort everything.  */
	    goto abort;

	  /* If the discriminant info is not consistent with what we saw so
	     far, consider the analysis failed and abort everything.  */
	  if (candidate_discr == NULL_TREE
	      || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
	    goto abort;
	  else
	    *discr_decl = candidate_discr;

	  if (new_node != NULL)
	    {
	      new_node->dw_discr_next = discr_lists[i];
	      discr_lists[i] = new_node;
	    }
	  match_expr = next_round_match_expr;
	}
    }

  /* If we reach this point, we could match everything we were interested
     in.  */
  return;

abort:
  /* Clean all data structure and return no result.  */
  free (ptr: *discr_lists_p);
  *discr_lists_p = NULL;
  *discr_decl = NULL_TREE;
}
25711 | |
/* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
   of STRUCT_TYPE, a record type.  This new DIE is emitted as the next child
   under CONTEXT_DIE.

   Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
   QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter.  The members for
   this type, which are record types, represent the available variants and each
   has a DECL_QUALIFIER attribute.  The discriminant and the discriminant
   values are inferred from these attributes.

   In trees, the offsets for the fields inside these sub-records are relative
   to the variant part itself, whereas the corresponding DIEs should have
   offset attributes that are relative to the embedding record base address.
   This is why the caller must provide a VARIANT_PART_OFFSET expression: it
   must be an expression that computes the offset of the variant part to
   describe in DWARF.  */

static void
gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
		  dw_die_ref context_die)
{
  const tree variant_part_type = TREE_TYPE (variant_part_decl);
  tree variant_part_offset = vlr_ctx->variant_part_offset;

  /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
     NULL_TREE if there is no such field.  */
  tree discr_decl = NULL_TREE;
  dw_discr_list_ref *discr_lists;
  unsigned discr_lists_length = 0;
  unsigned i;

  dw_die_ref dwarf_proc_die = NULL;
  dw_die_ref variant_part_die
    = new_die (tag_value: DW_TAG_variant_part, parent_die: context_die, t: variant_part_type);

  equate_decl_number_to_die (decl: variant_part_decl, decl_die: variant_part_die);

  analyze_variants_discr (variant_part_decl, struct_type: vlr_ctx->struct_type,
			  discr_decl: &discr_decl, discr_lists_p: &discr_lists, discr_lists_length: &discr_lists_length);

  if (discr_decl != NULL_TREE)
    {
      dw_die_ref discr_die = lookup_decl_die (decl: discr_decl);

      if (discr_die)
	add_AT_die_ref (die: variant_part_die, attr_kind: DW_AT_discr, targ_die: discr_die);
      else
	/* We have no DIE for the discriminant, so just discard all
	   discriminant information in the output.  */
	discr_decl = NULL_TREE;
    }

  /* If the offset for this variant part is more complex than a constant,
     create a DWARF procedure for it so that we will not have to generate
     DWARF expressions for it for each member.  */
  if (TREE_CODE (variant_part_offset) != INTEGER_CST
      && (dwarf_version >= 3 || !dwarf_strict))
    {
      struct loc_descr_context ctx = {
	.context_type: vlr_ctx->struct_type, /* context_type */
	NULL_TREE,		/* base_decl */
	NULL,			/* dpi */
	.placeholder_arg: false,			/* placeholder_arg */
	.placeholder_seen: false,			/* placeholder_seen */
	.strict_signedness: false			/* strict_signedness */
      };
      /* Wrap the offset computation in a synthetic FUNCTION_DECL so it
	 can be emitted once as a DW_TAG_dwarf_procedure and called from
	 each member's location description.  */
      const tree dwarf_proc_fndecl
        = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
		      build_function_type (TREE_TYPE (variant_part_offset),
					   NULL_TREE));
      const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
      const dw_loc_descr_ref dwarf_proc_body
        = loc_descriptor_from_tree (loc: variant_part_offset, want_address: 0, context: &ctx);

      dwarf_proc_die = new_dwarf_proc_die (location: dwarf_proc_body,
					   fndecl: dwarf_proc_fndecl, parent_die: context_die);
      if (dwarf_proc_die != NULL)
	variant_part_offset = dwarf_proc_call;
    }

  /* Output DIEs for all variants.  */
  i = 0;
  for (tree variant = TYPE_FIELDS (variant_part_type);
       variant != NULL_TREE;
       variant = DECL_CHAIN (variant), ++i)
    {
      tree variant_type = TREE_TYPE (variant);
      dw_die_ref variant_die;

      /* All variants (i.e. members of a variant part) are supposed to be
	 encoded as structures.  Sub-variant parts are QUAL_UNION_TYPE fields
	 under these records.  */
      gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);

      variant_die = new_die (tag_value: DW_TAG_variant, parent_die: variant_part_die, t: variant_type);
      equate_decl_number_to_die (decl: variant, decl_die: variant_die);

      /* Output discriminant values this variant matches, if any.  */
      if (discr_decl == NULL || discr_lists[i] == NULL)
	/* In the case we have no discriminant information at all, this is
	   probably the default variant: as the standard says, don't
	   output any discriminant value/list attribute.  */
	;
      else if (discr_lists[i]->dw_discr_next == NULL
	       && !discr_lists[i]->dw_discr_range)
	/* If there is only one accepted value, don't bother outputting a
	   list.  */
	add_discr_value (die: variant_die, value: &discr_lists[i]->dw_discr_lower_bound);
      else
	add_discr_list (die: variant_die, discr_list: discr_lists[i]);

      for (tree member = TYPE_FIELDS (variant_type);
	   member != NULL_TREE;
	   member = DECL_CHAIN (member))
	{
	  struct vlr_context vlr_sub_ctx = {
	    .struct_type: vlr_ctx->struct_type, /* struct_type */
	    NULL		    /* variant_part_offset */
	  };
	  if (is_variant_part (decl: member))
	    {
	      /* All offsets for fields inside variant parts are relative to
		 the top-level embedding RECORD_TYPE's base address.  On the
		 other hand, offsets in GCC's types are relative to the
		 nested-most variant part.  So we have to sum offsets each time
		 we recurse.  */

	      vlr_sub_ctx.variant_part_offset
	        = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
			       variant_part_offset, byte_position (member));
	      gen_variant_part (variant_part_decl: member, vlr_ctx: &vlr_sub_ctx, context_die: variant_die);
	    }
	  else
	    {
	      vlr_sub_ctx.variant_part_offset = variant_part_offset;
	      gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
	    }
	}
    }

  free (ptr: discr_lists);
}
25854 | |
/* Generate DIEs for the members of struct/union/class TYPE and attach
   them as children of CONTEXT_DIE (normally TYPE's own DIE):
   inheritance DIEs for base classes first (early dwarf only), then one
   DIE per member declaration.  TYPE must be its own main variant.  */

static void
gen_member_die (tree type, dw_die_ref context_die)
{
  tree member;
  tree binfo = TYPE_BINFO (type);

  gcc_assert (TYPE_MAIN_VARIANT (type) == type);

  /* If this is not an incomplete type, output descriptions of each of its
     members.  Note that as we output the DIEs necessary to represent the
     members of this record or union type, we will also be trying to output
     DIEs to represent the *types* of those members.  However the `type'
     function (above) will specifically avoid generating type DIEs for member
     types *within* the list of member DIEs for this (containing) type except
     for those types (of members) which are explicitly marked as also being
     members of this (containing) type themselves.  The g++ front- end can
     force any given type to be treated as a member of some other (containing)
     type by setting the TYPE_CONTEXT of the given (member) type to point to
     the TREE node representing the appropriate (containing) type.  */

  /* First output info about the base classes.  */
  if (binfo && early_dwarf)
    {
      vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
      int i;
      tree base;

      /* A NULL access vector means every base is taken as public.  */
      for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
	gen_inheritance_die (binfo: base,
			     access: (accesses ? (*accesses)[i] : access_public_node),
			     type,
			     context_die);
    }

  /* Now output info about the members.  */
  for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
    {
      /* Ignore clones.  */
      if (DECL_ABSTRACT_ORIGIN (member))
	continue;

      struct vlr_context vlr_ctx = { .struct_type: type, NULL_TREE };
      /* Detect C++ inline static data members: static VAR_DECLs for
	 which the front end reports a DW_AT_inline attribute.  Those
	 need an extra toplevel DW_TAG_variable DIE, emitted at the
	 bottom of this loop.  */
      bool static_inline_p
	= (VAR_P (member)
	   && TREE_STATIC (member)
	   && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
	       != -1));

      /* If we thought we were generating minimal debug info for TYPE
	 and then changed our minds, some of the member declarations
	 may have already been defined.  Don't define them again, but
	 do put them in the right order.  */

      if (dw_die_ref child = lookup_decl_die (decl: member))
	{
	  /* Handle inline static data members, which only have in-class
	     declarations.  */
	  bool splice = true;

	  dw_die_ref ref = NULL;
	  if (child->die_tag == DW_TAG_variable
	      && child->die_parent == comp_unit_die ())
	    {
	      ref = get_AT_ref (die: child, attr_kind: DW_AT_specification);

	      /* For C++17 inline static data members followed by redundant
		 out of class redeclaration, we might get here with
		 child being the DIE created for the out of class
		 redeclaration and with its DW_AT_specification being
		 the DIE created for in-class definition.  We want to
		 reparent the latter, and don't want to create another
		 DIE with DW_AT_specification in that case, because
		 we already have one.  */
	      if (ref
		  && static_inline_p
		  && ref->die_tag == DW_TAG_variable
		  && ref->die_parent == comp_unit_die ()
		  && get_AT (die: ref, attr_kind: DW_AT_specification) == NULL)
		{
		  child = ref;
		  ref = NULL;
		  static_inline_p = false;
		}

	      if (!ref)
		{
		  /* No DW_AT_specification yet: move the toplevel DIE
		     under the class and, pre-DWARF 5, retag it as a
		     plain member.  */
		  reparent_child (child, new_parent: context_die);
		  if (dwarf_version < 5)
		    child->die_tag = DW_TAG_member;
		  splice = false;
		}
	    }
	  else if (child->die_tag == DW_TAG_enumerator)
	    /* Enumerators remain under their enumeration even if
	       their names are introduced in the enclosing scope.  */
	    splice = false;

	  if (splice)
	    splice_child_die (parent: context_die, child);
	}

      /* Do not generate DWARF for variant parts if we are generating the
	 corresponding GNAT encodings: DIEs generated for the two schemes
	 would conflict in our mappings.  */
      else if (is_variant_part (decl: member)
	       && gnat_encodings != DWARF_GNAT_ENCODINGS_ALL)
	{
	  vlr_ctx.variant_part_offset = byte_position (member);
	  gen_variant_part (variant_part_decl: member, vlr_ctx: &vlr_ctx, context_die);
	}
      else
	{
	  vlr_ctx.variant_part_offset = NULL_TREE;
	  gen_decl_die (member, NULL, &vlr_ctx, context_die);
	}

      /* For C++ inline static data members emit immediately a DW_TAG_variable
	 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
	 DW_AT_specification.  */
      if (static_inline_p)
	{
	  /* Temporarily clear DECL_EXTERNAL so the decl is emitted as a
	     definition rather than a declaration; restore it after.  */
	  int old_extern = DECL_EXTERNAL (member);
	  DECL_EXTERNAL (member) = 0;
	  gen_decl_die (member, NULL, NULL, comp_unit_die ());
	  DECL_EXTERNAL (member) = old_extern;
	}
    }
}
25985 | |
/* Generate a DIE for a structure or union type.  If TYPE_DECL_SUPPRESS_DEBUG
   is set, we pretend that the type was never defined, so we only get the
   member DIEs needed by later specification DIEs.  CONTEXT_DIE is only a
   suggestion; the DIE's real parent comes from scope_die_for.  */

static void
gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
			      enum debug_info_usage usage)
{
  if (TREE_ASM_WRITTEN (type))
    {
      /* Fill in the bound of variable-length fields in late dwarf if
	 still incomplete.  */
      if (!early_dwarf && variably_modified_type_p (type, NULL))
	for (tree member = TYPE_FIELDS (type);
	     member;
	     member = DECL_CHAIN (member))
	  fill_variable_array_bounds (TREE_TYPE (member));
      return;
    }

  dw_die_ref type_die = lookup_type_die (type);
  dw_die_ref scope_die = 0;
  bool nested = false;
  /* COMPLETE: TYPE has a size and its stub decl does not suppress debug
     info, i.e. we may describe the full definition, not only declare.  */
  bool complete = (TYPE_SIZE (type)
		   && (! TYPE_STUB_DECL (type)
		       || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
  /* Under a namespace, emit only a declaration here.  */
  bool ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
  complete = complete && should_emit_struct_debug (type, usage);

  /* An existing DIE and no complete definition wanted: nothing to add.  */
  if (type_die && ! complete)
    return;

  if (TYPE_CONTEXT (type) != NULL_TREE
      && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
	  || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
    nested = true;

  scope_die = scope_die_for (t: type, context_die);

  /* Generate child dies for template parameters.  */
  if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
    schedule_generic_params_dies_gen (t: type);

  if (! type_die || (nested && is_cu_die (c: scope_die)))
    /* First occurrence of type or toplevel definition of nested class.  */
    {
      dw_die_ref old_die = type_die;

      type_die = new_die (TREE_CODE (type) == RECORD_TYPE
			  ? record_type_tag (type) : DW_TAG_union_type,
			  parent_die: scope_die, t: type);
      equate_type_number_to_die (type, type_die);
      /* A pre-existing declaration DIE becomes the target of
	 DW_AT_specification; otherwise name the new DIE directly.  */
      if (old_die)
	add_AT_specification (die: type_die, targ_die: old_die);
      else
	add_name_attribute (die: type_die, name_string: type_tag (type));
    }
  else
    remove_AT (die: type_die, attr_kind: DW_AT_declaration);

  /* If this type has been completed, then give it a byte_size attribute and
     then give a list of members.  */
  if (complete && !ns_decl)
    {
      /* Prevent infinite recursion in cases where the type of some member of
	 this type is expressed in terms of this type itself.  */
      TREE_ASM_WRITTEN (type) = 1;
      add_byte_size_attribute (die: type_die, tree_node: type);
      add_alignment_attribute (die: type_die, tree_node: type);
      if (TYPE_STUB_DECL (type) != NULL_TREE)
	{
	  add_src_coords_attributes (die: type_die, TYPE_STUB_DECL (type));
	  add_accessibility_attribute (die: type_die, TYPE_STUB_DECL (type));
	}

      /* If the first reference to this type was as the return type of an
	 inline function, then it may not have a parent.  Fix this now.  */
      if (type_die->die_parent == NULL)
	add_child_die (die: scope_die, child_die: type_die);

      gen_member_die (type, context_die: type_die);

      add_gnat_descriptive_type_attribute (die: type_die, type, context_die);
      if (TYPE_ARTIFICIAL (type))
	add_AT_flag (die: type_die, attr_kind: DW_AT_artificial, flag: 1);

      /* GNU extension: Record what type our vtable lives in.  */
      if (TYPE_VFIELD (type))
	{
	  tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));

	  gen_type_die (vtype, context_die);
	  add_AT_die_ref (die: type_die, attr_kind: DW_AT_containing_type,
			  targ_die: lookup_type_die (type: vtype));
	}
    }
  else
    {
      add_AT_flag (die: type_die, attr_kind: DW_AT_declaration, flag: 1);

      /* Remember incomplete types so they can be retrofitted with their
	 definition later.  We don't need to do this for function-local
	 types.  */
      if (TYPE_STUB_DECL (type)
	  && ! decl_function_context (TYPE_STUB_DECL (type)))
	vec_safe_push (v&: incomplete_types, obj: type);
    }

  if (get_AT (die: type_die, attr_kind: DW_AT_name))
    add_pubtype (decl: type, die: type_die);
}
26095 | |
26096 | /* Generate a DIE for a subroutine _type_. */ |
26097 | |
26098 | static void |
26099 | gen_subroutine_type_die (tree type, dw_die_ref context_die) |
26100 | { |
26101 | tree return_type = TREE_TYPE (type); |
26102 | dw_die_ref subr_die |
26103 | = new_die (tag_value: DW_TAG_subroutine_type, |
26104 | parent_die: scope_die_for (t: type, context_die), t: type); |
26105 | |
26106 | equate_type_number_to_die (type, type_die: subr_die); |
26107 | add_prototyped_attribute (die: subr_die, func_type: type); |
26108 | add_type_attribute (object_die: subr_die, type: return_type, cv_quals: TYPE_UNQUALIFIED, reverse: false, |
26109 | context_die); |
26110 | add_alignment_attribute (die: subr_die, tree_node: type); |
26111 | gen_formal_types_die (function_or_method_type: type, context_die: subr_die); |
26112 | |
26113 | if (get_AT (die: subr_die, attr_kind: DW_AT_name)) |
26114 | add_pubtype (decl: type, die: subr_die); |
26115 | if ((dwarf_version >= 5 || !dwarf_strict) |
26116 | && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1) |
26117 | add_AT_flag (die: subr_die, attr_kind: DW_AT_reference, flag: 1); |
26118 | if ((dwarf_version >= 5 || !dwarf_strict) |
26119 | && lang_hooks.types.type_dwarf_attribute (type, |
26120 | DW_AT_rvalue_reference) != -1) |
26121 | add_AT_flag (die: subr_die, attr_kind: DW_AT_rvalue_reference, flag: 1); |
26122 | } |
26123 | |
/* Generate a DW_TAG_typedef DIE for TYPE_DECL DECL under CONTEXT_DIE.
   Handles both ordinary typedefs (DECL_ORIGINAL_TYPE set) and "naming"
   typedefs of anonymous tagged types, e.g. `typedef struct {...} foo;'.  */

static void
gen_typedef_die (tree decl, dw_die_ref context_die)
{
  dw_die_ref type_die;
  tree type;

  if (TREE_ASM_WRITTEN (decl))
    {
      /* Already emitted; in that case just complete any still-missing
	 variable-length array bounds of the original type.  */
      if (DECL_ORIGINAL_TYPE (decl))
	fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
      return;
    }

  /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
     checks in process_scope_var and modified_type_die), this should be called
     only for original types.  */
  gcc_assert (decl_ultimate_origin (decl) == NULL
	      || decl_ultimate_origin (decl) == decl);

  TREE_ASM_WRITTEN (decl) = 1;
  type_die = new_die (tag_value: DW_TAG_typedef, parent_die: context_die, t: decl);

  add_name_and_src_coords_attributes (die: type_die, decl);
  if (DECL_ORIGINAL_TYPE (decl))
    {
      /* Ordinary typedef: DW_AT_type must refer to the aliased
	 (original) type, while the typedef variant type maps to this
	 DW_TAG_typedef DIE.  */
      type = DECL_ORIGINAL_TYPE (decl);
      if (type == error_mark_node)
	return;

      gcc_assert (type != TREE_TYPE (decl));
      equate_type_number_to_die (TREE_TYPE (decl), type_die);
    }
  else
    {
      type = TREE_TYPE (decl);
      if (type == error_mark_node)
	return;

      if (is_naming_typedef_decl (TYPE_NAME (type)))
	{
	  /* Here, we are in the case of decl being a typedef naming
	     an anonymous type, e.g:
	         typedef struct {...} foo;
	     In that case TREE_TYPE (decl) is not a typedef variant
	     type and TYPE_NAME of the anonymous type is set to the
	     TYPE_DECL of the typedef.  This construct is emitted by
	     the C++ FE.

	     TYPE is the anonymous struct named by the typedef
	     DECL.  As we need the DW_AT_type attribute of the
	     DW_TAG_typedef to point to the DIE of TYPE, let's
	     generate that DIE right away.  add_type_attribute
	     called below will then pick (via lookup_type_die) that
	     anonymous struct DIE.  */
	  if (!TREE_ASM_WRITTEN (type))
	    gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);

	  /* This is a GNU Extension.  We are adding a
	     DW_AT_linkage_name attribute to the DIE of the
	     anonymous struct TYPE.  The value of that attribute
	     is the name of the typedef decl naming the anonymous
	     struct.  This greatly eases the work of consumers of
	     this debug info.  */
	  add_linkage_name_raw (die: lookup_type_die (type), decl);
	}
    }

  add_type_attribute (object_die: type_die, type, cv_quals: decl_quals (decl), reverse: false,
		      context_die);

  if (is_naming_typedef_decl (decl))
    /* We want that all subsequent calls to lookup_type_die with
       TYPE in argument yield the DW_TAG_typedef we have just
       created.  */
    equate_type_number_to_die (type, type_die);

  add_alignment_attribute (die: type_die, TREE_TYPE (decl));

  add_accessibility_attribute (die: type_die, decl);

  /* For abstract instances, also map DECL itself to this DIE so
     concrete instances can refer back to it.  */
  if (DECL_ABSTRACT_P (decl))
    equate_decl_number_to_die (decl, decl_die: type_die);

  if (get_AT (die: type_die, attr_kind: DW_AT_name))
    add_pubtype (decl, die: type_die);
}
26212 | |
/* Generate a DIE for a struct, class, enum or union type TYPE.
   CONTEXT_DIE is only a starting point; it is rewritten below to the
   type's real context (enclosing class, enclosing function, or
   namespace).  USAGE says whether this is a direct or indirect use.
   REVERSE is forwarded to gen_enumeration_type_die (presumably to
   request a storage-order-reversed variant -- confirm with callers).  */

static void
gen_tagged_type_die (tree type,
		     dw_die_ref context_die,
		     enum debug_info_usage usage,
		     bool reverse)
{
  if (type == NULL_TREE
      || !is_tagged_type (type))
    return;

  if (TREE_ASM_WRITTEN (type))
    ;
  /* If this is a nested type whose containing class hasn't been written
     out yet, writing it out will cover this one, too.  This does not apply
     to instantiations of member class templates; they need to be added to
     the containing class as they are generated.  FIXME: This hurts the
     idea of combining type decls from multiple TUs, since we can't predict
     what set of template instantiations we'll get.  */
  else if (TYPE_CONTEXT (type)
	   && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
	   && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
    {
      gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);

      if (TREE_ASM_WRITTEN (type))
	return;

      /* If that failed, attach ourselves to the stub.  */
      context_die = lookup_type_die (TYPE_CONTEXT (type));
    }
  else if (TYPE_CONTEXT (type) != NULL_TREE
	   && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
    {
      /* If this type is local to a function that hasn't been written
	 out yet, use a NULL context for now; it will be fixed up in
	 decls_for_scope.  */
      context_die = lookup_decl_die (TYPE_CONTEXT (type));
      /* A declaration DIE doesn't count; nested types need to go in the
	 specification.  */
      if (context_die && is_declaration_die (die: context_die))
	context_die = NULL;
    }
  else
    context_die = declare_in_namespace (type, context_die);

  if (TREE_CODE (type) == ENUMERAL_TYPE)
    {
      /* This might have been written out by the call to
	 declare_in_namespace.  */
      if (!TREE_ASM_WRITTEN (type) || reverse)
	gen_enumeration_type_die (type, context_die, reverse);
    }
  else
    gen_struct_or_union_type_die (type, context_die, usage);

  /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
     it up if it is ever completed.  gen_*_type_die will set it for us
     when appropriate.  */
}
26274 | |
/* Generate a type description DIE for TYPE under CONTEXT_DIE.  USAGE
   distinguishes direct from indirect uses of the type; REVERSE is
   forwarded to the tagged-type path (see gen_tagged_type_die).  This is
   the central type dispatcher: it first redirects typedef variants,
   naming typedefs and language debug-type substitutes, canonicalizes
   TYPE to a main variant, then switches on the tree code.  */

static void
gen_type_die_with_usage (tree type, dw_die_ref context_die,
			 enum debug_info_usage usage, bool reverse)
{
  struct array_descr_info info;

  if (type == NULL_TREE || type == error_mark_node)
    return;

  if (flag_checking && type)
    verify_type (t: type);

  if (TYPE_NAME (type) != NULL_TREE
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && is_redundant_typedef (TYPE_NAME (type))
      && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
    /* The DECL of this type is a typedef we don't want to emit debug
       info for but we want debug info for its underlying typedef.
       This can happen for e.g, the injected-class-name of a C++
       type.  */
    type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));

  /* If TYPE is a typedef type variant, let's generate debug info
     for the parent typedef which TYPE is a type of.  */
  if (typedef_variant_p (type))
    {
      if (TREE_ASM_WRITTEN (type))
	return;

      tree name = TYPE_NAME (type);
      tree origin = decl_ultimate_origin (decl: name);
      if (origin != NULL && origin != name)
	{
	  /* The typedef has an abstract origin elsewhere: emit the
	     origin's decl instead of this copy.  */
	  gen_decl_die (origin, NULL, NULL, context_die);
	  return;
	}

      /* Prevent broken recursion; we can't hand off to the same type.  */
      gcc_assert (DECL_ORIGINAL_TYPE (name) != type);

      /* Give typedefs the right scope.  */
      context_die = scope_die_for (t: type, context_die);

      TREE_ASM_WRITTEN (type) = 1;

      gen_decl_die (name, NULL, NULL, context_die);
      return;
    }

  /* If type is an anonymous tagged type named by a typedef, let's
     generate debug info for the typedef.  */
  if (is_naming_typedef_decl (TYPE_NAME (type)))
    {
      /* Give typedefs the right scope.  */
      context_die = scope_die_for (t: type, context_die);

      gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
      return;
    }

  /* Give the language a chance to substitute another type for debug
     info purposes; if it does, recurse on the substitute.  */
  if (lang_hooks.types.get_debug_type)
    {
      tree debug_type = lang_hooks.types.get_debug_type (type);

      if (debug_type != NULL_TREE && debug_type != type)
	{
	  gen_type_die_with_usage (type: debug_type, context_die, usage, reverse);
	  return;
	}
    }

  /* We are going to output a DIE to represent the unqualified version
     of this type (i.e. without any const or volatile qualifiers) so
     get the main variant (i.e. the unqualified version) of this type
     now.  (Vectors and arrays are special because the debugging info is in the
     cloned type itself.  Similarly function/method types can contain extra
     ref-qualification).  */
  if (FUNC_OR_METHOD_TYPE_P (type))
    {
      /* For function/method types, can't use type_main_variant here,
	 because that can have different ref-qualifiers for C++,
	 but try to canonicalize.  */
      tree main = TYPE_MAIN_VARIANT (type);
      for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
	if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
	    && check_base_type (cand: t, base: main)
	    && check_lang_type (cand: t, base: type))
	  {
	    type = t;
	    break;
	  }
    }
  else if (TREE_CODE (type) != VECTOR_TYPE
	   && TREE_CODE (type) != ARRAY_TYPE)
    type = type_main_variant (type);

  /* If this is an array type with hidden descriptor, handle it first.  */
  if (!TREE_ASM_WRITTEN (type)
      && lang_hooks.types.get_array_descr_info)
    {
      memset (s: &info, c: 0, n: sizeof (info));
      if (lang_hooks.types.get_array_descr_info (type, &info))
	{
	  /* Fortran sometimes emits array types with no dimension.  */
	  gcc_assert (info.ndimensions >= 0
		      && (info.ndimensions
			  <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
	  gen_descr_array_type_die (type, info: &info, context_die);
	  TREE_ASM_WRITTEN (type) = 1;
	  return;
	}
    }

  if (TREE_ASM_WRITTEN (type) && !reverse)
    {
      /* Variable-length types may be incomplete even if
	 TREE_ASM_WRITTEN.  For such types, fall through to
	 gen_array_type_die() and possibly fill in
	 DW_AT_{upper,lower}_bound attributes.  */
      if ((TREE_CODE (type) != ARRAY_TYPE
	   && TREE_CODE (type) != RECORD_TYPE
	   && TREE_CODE (type) != UNION_TYPE
	   && TREE_CODE (type) != QUAL_UNION_TYPE)
	  || !variably_modified_type_p (type, NULL))
	return;
    }

  switch (TREE_CODE (type))
    {
    case ERROR_MARK:
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* We must set TREE_ASM_WRITTEN in case this is a recursive type.  This
	 ensures that the gen_type_die recursion will terminate even if the
	 type is recursive.  Recursive types are possible in Ada.  */
      /* ??? We could perhaps do this for all types before the switch
	 statement.  */
      TREE_ASM_WRITTEN (type) = 1;

      /* For these types, all that is required is that we output a DIE (or a
	 set of DIEs) to represent the "basis" type.  */
      gen_type_die_with_usage (TREE_TYPE (type), context_die,
			       usage: DINFO_USAGE_IND_USE);
      break;

    case OFFSET_TYPE:
      /* This code is used for C++ pointer-to-data-member types.
	 Output a description of the relevant class type.  */
      gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
			       usage: DINFO_USAGE_IND_USE);

      /* Output a description of the type of the object pointed to.  */
      gen_type_die_with_usage (TREE_TYPE (type), context_die,
			       usage: DINFO_USAGE_IND_USE);

      /* Now output a DIE to represent this pointer-to-data-member type
	 itself.  */
      gen_ptr_to_mbr_type_die (type, context_die);
      break;

    case FUNCTION_TYPE:
      /* Force out return type (in case it wasn't forced out already).  */
      gen_type_die_with_usage (TREE_TYPE (type), context_die,
			       usage: DINFO_USAGE_DIR_USE);
      gen_subroutine_type_die (type, context_die);
      break;

    case METHOD_TYPE:
      /* Force out return type (in case it wasn't forced out already).  */
      gen_type_die_with_usage (TREE_TYPE (type), context_die,
			       usage: DINFO_USAGE_DIR_USE);
      gen_subroutine_type_die (type, context_die);
      break;

    case ARRAY_TYPE:
    case VECTOR_TYPE:
      gen_array_type_die (type, context_die);
      break;

    case ENUMERAL_TYPE:
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Note the early return: gen_tagged_type_die manages
	 TREE_ASM_WRITTEN itself so incomplete structs can be fixed up
	 later.  */
      gen_tagged_type_die (type, context_die, usage, reverse);
      return;

    case VOID_TYPE:
    case OPAQUE_TYPE:
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case BOOLEAN_TYPE:
    case BITINT_TYPE:
      /* No DIEs needed for fundamental types.  */
      break;

    case NULLPTR_TYPE:
    case LANG_TYPE:
      /* Just use DW_TAG_unspecified_type.  */
      {
	dw_die_ref type_die = lookup_type_die (type);
	if (type_die == NULL)
	  {
	    tree name = TYPE_IDENTIFIER (type);
	    type_die = new_die (tag_value: DW_TAG_unspecified_type, parent_die: comp_unit_die (),
				t: type);
	    add_name_attribute (die: type_die, IDENTIFIER_POINTER (name));
	    equate_type_number_to_die (type, type_die);
	  }
      }
      break;

    default:
      if (is_cxx_auto (type))
	{
	  /* C++ `auto'/`decltype(auto)' placeholders share a single
	     cached DW_TAG_unspecified_type DIE each.  */
	  tree name = TYPE_IDENTIFIER (type);
	  dw_die_ref *die = (name == get_identifier ("auto" )
			     ? &auto_die : &decltype_auto_die);
	  if (!*die)
	    {
	      *die = new_die (tag_value: DW_TAG_unspecified_type,
			      parent_die: comp_unit_die (), NULL_TREE);
	      add_name_attribute (die: *die, IDENTIFIER_POINTER (name));
	    }
	  equate_type_number_to_die (type, type_die: *die);
	  break;
	}
      gcc_unreachable ();
    }

  TREE_ASM_WRITTEN (type) = 1;
}
26512 | |
26513 | static void |
26514 | gen_type_die (tree type, dw_die_ref context_die, bool reverse) |
26515 | { |
26516 | if (type != error_mark_node) |
26517 | { |
26518 | gen_type_die_with_usage (type, context_die, usage: DINFO_USAGE_DIR_USE, reverse); |
26519 | if (flag_checking) |
26520 | { |
26521 | dw_die_ref die = lookup_type_die (type); |
26522 | if (die) |
26523 | check_die (die); |
26524 | } |
26525 | } |
26526 | } |
26527 | |
26528 | /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the |
26529 | things which are local to the given block. */ |
26530 | |
26531 | static void |
26532 | gen_block_die (tree stmt, dw_die_ref context_die) |
26533 | { |
26534 | int must_output_die = 0; |
26535 | bool inlined_func; |
26536 | |
26537 | /* Ignore blocks that are NULL. */ |
26538 | if (stmt == NULL_TREE) |
26539 | return; |
26540 | |
26541 | inlined_func = inlined_function_outer_scope_p (block: stmt); |
26542 | |
26543 | /* If the block is one fragment of a non-contiguous block, do not |
26544 | process the variables, since they will have been done by the |
26545 | origin block. Do process subblocks. */ |
26546 | if (BLOCK_FRAGMENT_ORIGIN (stmt)) |
26547 | { |
26548 | tree sub; |
26549 | |
26550 | for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub)) |
26551 | gen_block_die (stmt: sub, context_die); |
26552 | |
26553 | return; |
26554 | } |
26555 | |
26556 | /* Determine if we need to output any Dwarf DIEs at all to represent this |
26557 | block. */ |
26558 | if (inlined_func) |
26559 | /* The outer scopes for inlinings *must* always be represented. We |
26560 | generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */ |
26561 | must_output_die = 1; |
26562 | else if (lookup_block_die (block: stmt)) |
26563 | /* If we already have a DIE then it was filled early. Meanwhile |
26564 | we might have pruned all BLOCK_VARS as optimized out but we |
26565 | still want to generate high/low PC attributes so output it. */ |
26566 | must_output_die = 1; |
26567 | else if (TREE_USED (stmt) |
26568 | || TREE_ASM_WRITTEN (stmt)) |
26569 | { |
26570 | /* Determine if this block directly contains any "significant" |
26571 | local declarations which we will need to output DIEs for. */ |
26572 | if (debug_info_level > DINFO_LEVEL_TERSE) |
26573 | { |
26574 | /* We are not in terse mode so any local declaration that |
26575 | is not ignored for debug purposes counts as being a |
26576 | "significant" one. */ |
26577 | if (BLOCK_NUM_NONLOCALIZED_VARS (stmt)) |
26578 | must_output_die = 1; |
26579 | else |
26580 | for (tree var = BLOCK_VARS (stmt); var; var = DECL_CHAIN (var)) |
26581 | if (!DECL_IGNORED_P (var)) |
26582 | { |
26583 | must_output_die = 1; |
26584 | break; |
26585 | } |
26586 | } |
26587 | else if (!dwarf2out_ignore_block (stmt)) |
26588 | must_output_die = 1; |
26589 | } |
26590 | |
26591 | /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block |
26592 | DIE for any block which contains no significant local declarations at |
26593 | all. Rather, in such cases we just call `decls_for_scope' so that any |
26594 | needed Dwarf info for any sub-blocks will get properly generated. Note |
26595 | that in terse mode, our definition of what constitutes a "significant" |
26596 | local declaration gets restricted to include only inlined function |
26597 | instances and local (nested) function definitions. */ |
26598 | if (must_output_die) |
26599 | { |
26600 | if (inlined_func) |
26601 | gen_inlined_subroutine_die (stmt, context_die); |
26602 | else |
26603 | gen_lexical_block_die (stmt, context_die); |
26604 | } |
26605 | else |
26606 | decls_for_scope (stmt, context_die); |
26607 | } |
26608 | |
/* Process variable DECL (or variable with origin ORIGIN) within
   block STMT and add it to CONTEXT_DIE.  DECL may be NULL, in which
   case ORIGIN stands in for it (see decl_or_origin below); ORIGIN is
   the abstract origin when STMT belongs to an inlined instance.  */
static void
process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
{
  dw_die_ref die;
  tree decl_or_origin = decl ? decl : origin;

  /* Look up any DIE created early for this decl; a stub TYPE_DECL maps
     through its type instead.  */
  if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
    die = lookup_decl_die (decl: decl_or_origin);
  else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
    {
      if (TYPE_DECL_IS_STUB (decl_or_origin))
	die = lookup_type_die (TREE_TYPE (decl_or_origin));
      else
	die = lookup_decl_die (decl: decl_or_origin);
      /* Avoid re-creating the DIE late if it was optimized as unused early.  */
      if (! die && ! early_dwarf)
	return;
    }
  else
    die = NULL;

  /* Avoid creating DIEs for local typedefs and concrete static variables that
     will only be pruned later.  */
  if ((origin || decl_ultimate_origin (decl))
      && (TREE_CODE (decl_or_origin) == TYPE_DECL
	  || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
    {
      origin = decl_ultimate_origin (decl: decl_or_origin);
      /* For a concrete static variable, reuse the origin's DIE instead
	 of creating a fresh one.  */
      if (decl && VAR_P (decl) && die != NULL)
	{
	  die = lookup_decl_die (decl: origin);
	  if (die != NULL)
	    equate_decl_number_to_die (decl, decl_die: die);
	}
      return;
    }

  /* A parentless early DIE gets attached to this scope now.  */
  if (die != NULL && die->die_parent == NULL)
    add_child_die (die: context_die, child_die: die);

  if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
    {
      /* Imported modules/decls are only handled in early dwarf.  */
      if (early_dwarf)
	dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
					     stmt, context_die);
    }
  else
    {
      if (decl && DECL_P (decl))
	{
	  die = lookup_decl_die (decl);

	  /* Early created DIEs do not have a parent as the decls refer
	     to the function as DECL_CONTEXT rather than the BLOCK.  */
	  if (die && die->die_parent == NULL)
	    {
	      gcc_assert (in_lto_p);
	      add_child_die (die: context_die, child_die: die);
	    }
	}

      gen_decl_die (decl, origin, NULL, context_die);
    }
}
26675 | |
/* Generate all of the decls declared within a given scope and (recursively)
   all of its sub-blocks.  STMT is the BLOCK tree; CONTEXT_DIE is the DIE
   the generated decl DIEs are attached to.  If RECURSE is false, only the
   decls directly in STMT are processed, not its sub-blocks.  */

static void
decls_for_scope (tree stmt, dw_die_ref context_die, bool recurse)
{
  tree decl;
  unsigned int i;
  tree subblocks;

  /* Ignore NULL blocks.  */
  if (stmt == NULL_TREE)
    return;

  /* Output the DIEs to represent all of the data objects and typedefs
     declared directly within this block but not within any nested
     sub-blocks.  Also, nested function and tag DIEs have been
     generated with a parent of NULL; fix that up now.  We don't
     have to do this if we're at -g1.  */
  if (debug_info_level > DINFO_LEVEL_TERSE)
    {
      for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
	process_scope_var (stmt, decl, NULL_TREE, context_die);
      /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
	 origin - avoid doing this twice as we have no good way to see
	 if we've done it once already.  */
      if (! early_dwarf)
	for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
	  {
	    decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
	    if (decl == current_function_decl)
	      /* Ignore declarations of the current function, while they
		 are declarations, gen_subprogram_die would treat them
		 as definitions again, because they are equal to
		 current_function_decl and endlessly recurse.  */;
	    else if (TREE_CODE (decl) == FUNCTION_DECL)
	      process_scope_var (stmt, decl, NULL_TREE, context_die);
	    else
	      /* Non-function nonlocalized vars are passed as ORIGIN so a
		 stub DIE with an abstract origin is produced.  */
	      process_scope_var (stmt, NULL_TREE, origin: decl, context_die);
	  }
    }

  /* Even if we're at -g1, we need to process the subblocks in order to get
     inlined call information.  */

  /* Output the DIEs to represent all sub-blocks (and the items declared
     therein) of this block.  */
  if (recurse)
    for (subblocks = BLOCK_SUBBLOCKS (stmt);
	 subblocks != NULL;
	 subblocks = BLOCK_CHAIN (subblocks))
      gen_block_die (stmt: subblocks, context_die);
}
26729 | |
26730 | /* Is this a typedef we can avoid emitting? */ |
26731 | |
26732 | static bool |
26733 | is_redundant_typedef (const_tree decl) |
26734 | { |
26735 | if (TYPE_DECL_IS_STUB (decl)) |
26736 | return true; |
26737 | |
26738 | if (DECL_ARTIFICIAL (decl) |
26739 | && DECL_CONTEXT (decl) |
26740 | && is_tagged_type (DECL_CONTEXT (decl)) |
26741 | && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL |
26742 | && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl)))) |
26743 | /* Also ignore the artificial member typedef for the class name. */ |
26744 | return true; |
26745 | |
26746 | return false; |
26747 | } |
26748 | |
26749 | /* Return TRUE if TYPE is a typedef that names a type for linkage |
26750 | purposes. This kind of typedefs is produced by the C++ FE for |
26751 | constructs like: |
26752 | |
26753 | typedef struct {...} foo; |
26754 | |
26755 | In that case, there is no typedef variant type produced for foo. |
26756 | Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous |
26757 | struct type. */ |
26758 | |
26759 | static bool |
26760 | is_naming_typedef_decl (const_tree decl) |
26761 | { |
26762 | if (decl == NULL_TREE |
26763 | || TREE_CODE (decl) != TYPE_DECL |
26764 | || DECL_NAMELESS (decl) |
26765 | || !is_tagged_type (TREE_TYPE (decl)) |
26766 | || DECL_IS_UNDECLARED_BUILTIN (decl) |
26767 | || is_redundant_typedef (decl) |
26768 | /* It looks like Ada produces TYPE_DECLs that are very similar |
26769 | to C++ naming typedefs but that have different |
26770 | semantics. Let's be specific to c++ for now. */ |
26771 | || !is_cxx (decl)) |
26772 | return false; |
26773 | |
26774 | return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE |
26775 | && TYPE_NAME (TREE_TYPE (decl)) == decl |
26776 | && (TYPE_STUB_DECL (TREE_TYPE (decl)) |
26777 | != TYPE_NAME (TREE_TYPE (decl)))); |
26778 | } |
26779 | |
26780 | /* Looks up the DIE for a context. */ |
26781 | |
26782 | static inline dw_die_ref |
26783 | lookup_context_die (tree context) |
26784 | { |
26785 | if (context) |
26786 | { |
26787 | /* Find die that represents this context. */ |
26788 | if (TYPE_P (context)) |
26789 | { |
26790 | context = TYPE_MAIN_VARIANT (context); |
26791 | dw_die_ref ctx = lookup_type_die (type: context); |
26792 | if (!ctx) |
26793 | return NULL; |
26794 | return strip_naming_typedef (type: context, type_die: ctx); |
26795 | } |
26796 | else |
26797 | return lookup_decl_die (decl: context); |
26798 | } |
26799 | return comp_unit_die (); |
26800 | } |
26801 | |
26802 | /* Returns the DIE for a context. */ |
26803 | |
26804 | static inline dw_die_ref |
26805 | get_context_die (tree context) |
26806 | { |
26807 | if (context) |
26808 | { |
26809 | /* Find die that represents this context. */ |
26810 | if (TYPE_P (context)) |
26811 | { |
26812 | context = TYPE_MAIN_VARIANT (context); |
26813 | return strip_naming_typedef (type: context, type_die: force_type_die (context)); |
26814 | } |
26815 | else |
26816 | return force_decl_die (context); |
26817 | } |
26818 | return comp_unit_die (); |
26819 | } |
26820 | |
/* Returns the DIE for decl.  A DIE will always be returned.
   If no DIE exists yet, a declaration DIE is forced into existence
   (temporarily faking "declaration-ness" for functions and variables),
   then looked up again.  */

static dw_die_ref
force_decl_die (tree decl)
{
  dw_die_ref decl_die;
  unsigned saved_external_flag;
  tree save_fn = NULL_TREE;
  decl_die = lookup_decl_die (decl);
  if (!decl_die)
    {
      dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));

      /* Forcing the context DIE may have created DECL's DIE as a side
	 effect; re-check before generating it ourselves.  */
      decl_die = lookup_decl_die (decl);
      if (decl_die)
	return decl_die;

      switch (TREE_CODE (decl))
	{
	case FUNCTION_DECL:
	  /* Clear current_function_decl, so that gen_subprogram_die thinks
	     that this is a declaration. At this point, we just want to force
	     declaration die.  */
	  save_fn = current_function_decl;
	  current_function_decl = NULL_TREE;
	  gen_subprogram_die (decl, context_die);
	  current_function_decl = save_fn;
	  break;

	case VAR_DECL:
	  /* Set external flag to force declaration die. Restore it after
	   gen_decl_die() call.  */
	  saved_external_flag = DECL_EXTERNAL (decl);
	  DECL_EXTERNAL (decl) = 1;
	  gen_decl_die (decl, NULL, NULL, context_die);
	  DECL_EXTERNAL (decl) = saved_external_flag;
	  break;

	case NAMESPACE_DECL:
	  if (dwarf_version >= 3 || !dwarf_strict)
	    dwarf2out_decl (decl);
	  else
	    /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace.  */
	    decl_die = comp_unit_die ();
	  break;

	case CONST_DECL:
	  /* Enumerators shouldn't need force_decl_die.  */
	  gcc_assert (DECL_CONTEXT (decl) == NULL_TREE
		      || TREE_CODE (DECL_CONTEXT (decl)) != ENUMERAL_TYPE);
	  gen_decl_die (decl, NULL, NULL, context_die);
	  break;

	case TRANSLATION_UNIT_DECL:
	  decl_die = comp_unit_die ();
	  break;

	default:
	  gcc_unreachable ();
	}

      /* We should be able to find the DIE now.  */
      if (!decl_die)
	decl_die = lookup_decl_die (decl);
      gcc_assert (decl_die);
    }

  return decl_die;
}
26890 | |
26891 | /* Returns the DIE for TYPE, that must not be a base type. A DIE is |
26892 | always returned. */ |
26893 | |
26894 | static dw_die_ref |
26895 | force_type_die (tree type) |
26896 | { |
26897 | dw_die_ref type_die; |
26898 | |
26899 | type_die = lookup_type_die (type); |
26900 | if (!type_die) |
26901 | { |
26902 | dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type)); |
26903 | |
26904 | type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type), |
26905 | reverse: false, context_die); |
26906 | gcc_assert (type_die); |
26907 | } |
26908 | return type_die; |
26909 | } |
26910 | |
26911 | /* Force out any required namespaces to be able to output DECL, |
26912 | and return the new context_die for it, if it's changed. */ |
26913 | |
26914 | static dw_die_ref |
26915 | setup_namespace_context (tree thing, dw_die_ref context_die) |
26916 | { |
26917 | tree context = (DECL_P (thing) |
26918 | ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing)); |
26919 | if (context && TREE_CODE (context) == NAMESPACE_DECL) |
26920 | /* Force out the namespace. */ |
26921 | context_die = force_decl_die (decl: context); |
26922 | |
26923 | return context_die; |
26924 | } |
26925 | |
26926 | /* Emit a declaration DIE for THING (which is either a DECL or a tagged |
26927 | type) within its namespace, if appropriate. |
26928 | |
26929 | For compatibility with older debuggers, namespace DIEs only contain |
26930 | declarations; all definitions are emitted at CU scope, with |
26931 | DW_AT_specification pointing to the declaration (like with class |
26932 | members). */ |
26933 | |
26934 | static dw_die_ref |
26935 | declare_in_namespace (tree thing, dw_die_ref context_die) |
26936 | { |
26937 | dw_die_ref ns_context; |
26938 | |
26939 | if (debug_info_level <= DINFO_LEVEL_TERSE) |
26940 | return context_die; |
26941 | |
26942 | /* External declarations in the local scope only need to be emitted |
26943 | once, not once in the namespace and once in the scope. |
26944 | |
26945 | This avoids declaring the `extern' below in the |
26946 | namespace DIE as well as in the innermost scope: |
26947 | |
26948 | namespace S |
26949 | { |
26950 | int i=5; |
26951 | int foo() |
26952 | { |
26953 | int i=8; |
26954 | extern int i; |
26955 | return i; |
26956 | } |
26957 | } |
26958 | */ |
26959 | if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die)) |
26960 | return context_die; |
26961 | |
26962 | /* If this decl is from an inlined function, then don't try to emit it in its |
26963 | namespace, as we will get confused. It would have already been emitted |
26964 | when the abstract instance of the inline function was emitted anyways. */ |
26965 | if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing)) |
26966 | return context_die; |
26967 | |
26968 | ns_context = setup_namespace_context (thing, context_die); |
26969 | |
26970 | if (ns_context != context_die) |
26971 | { |
26972 | if (is_fortran () || is_dlang ()) |
26973 | return ns_context; |
26974 | if (DECL_P (thing)) |
26975 | gen_decl_die (thing, NULL, NULL, ns_context); |
26976 | else |
26977 | gen_type_die (type: thing, context_die: ns_context); |
26978 | } |
26979 | return context_die; |
26980 | } |
26981 | |
/* Generate a DIE for a namespace or namespace alias DECL, attached to
   CONTEXT_DIE (or to the enclosing namespace's DIE once it has been
   forced out).  Fortran/D get DW_TAG_module instead of
   DW_TAG_namespace; aliases become DW_TAG_imported_declaration.  */

static void
gen_namespace_die (tree decl, dw_die_ref context_die)
{
  dw_die_ref namespace_die;

  /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
     they are an alias of.  */
  if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
    {
      /* Output a real namespace or module.  */
      context_die = setup_namespace_context (thing: decl, context_die: comp_unit_die ());
      namespace_die = new_die (tag_value: is_fortran () || is_dlang ()
			       ? DW_TAG_module : DW_TAG_namespace,
			       parent_die: context_die, t: decl);
      /* For Fortran modules defined in different CU don't add src coords.  */
      if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
	{
	  const char *name = dwarf2_name (decl, scope: 0);
	  if (name)
	    add_name_attribute (die: namespace_die, name_string: name);
	}
      else
	add_name_and_src_coords_attributes (die: namespace_die, decl);
      /* An external namespace/module is only a declaration here.  */
      if (DECL_EXTERNAL (decl))
	add_AT_flag (die: namespace_die, attr_kind: DW_AT_declaration, flag: 1);
      equate_decl_number_to_die (decl, decl_die: namespace_die);
    }
  else
    {
      /* Output a namespace alias.  */

      /* Force out the namespace we are an alias of, if necessary.  */
      dw_die_ref origin_die
	= force_decl_die (DECL_ABSTRACT_ORIGIN (decl));

      if (DECL_FILE_SCOPE_P (decl)
	  || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
	context_die = setup_namespace_context (thing: decl, context_die: comp_unit_die ());
      /* Now create the namespace alias DIE.  */
      namespace_die = new_die (tag_value: DW_TAG_imported_declaration, parent_die: context_die, t: decl);
      add_name_and_src_coords_attributes (die: namespace_die, decl);
      add_AT_die_ref (die: namespace_die, attr_kind: DW_AT_import, targ_die: origin_die);
      equate_decl_number_to_die (decl, decl_die: namespace_die);
    }
  /* DW_AT_export_symbols is a DWARF 5 addition (e.g. for inline
     namespaces), so only emit it when allowed.  */
  if ((dwarf_version >= 5 || !dwarf_strict)
      && lang_hooks.decls.decl_dwarf_attribute (decl,
						DW_AT_export_symbols) == 1)
    add_AT_flag (die: namespace_die, attr_kind: DW_AT_export_symbols, flag: 1);

  /* Bypass dwarf2_name's check for DECL_NAMELESS.  */
  if (want_pubnames ())
    add_pubname_string (str: lang_hooks.dwarf_name (decl, 1), die: namespace_die);
}
27037 | |
/* Generate Dwarf debug information for a decl described by DECL.
   The return value is currently only meaningful for PARM_DECLs,
   for all other decls it returns NULL.

   ORIGIN, if non-NULL, is the abstract origin to use instead of (or in
   addition to) DECL; at least one of DECL/ORIGIN must be non-NULL.

   If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
   It can be NULL otherwise.  */

static dw_die_ref
gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
	      dw_die_ref context_die)
{
  tree decl_or_origin = decl ? decl : origin;
  tree class_origin = NULL, ultimate_origin;

  /* Decls marked as to-be-ignored for debug get no DIE at all.  */
  if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
    return NULL;

  /* Dispatch on the kind of declaration.  */
  switch (TREE_CODE (decl_or_origin))
    {
    case ERROR_MARK:
      break;

    case CONST_DECL:
      if (!is_fortran () && !is_ada () && !is_dlang ())
	{
	  /* The individual enumerators of an enum type get output when we output
	     the Dwarf representation of the relevant enum type itself.  */
	  break;
	}

      /* Emit its type.  */
      gen_type_die (TREE_TYPE (decl), context_die);

      /* And its containing namespace.  */
      context_die = declare_in_namespace (thing: decl, context_die);

      gen_const_die (decl, context_die);
      break;

    case FUNCTION_DECL:
#if 0
      /* FIXME */
      /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
	 on local redeclarations of global functions.  That seems broken.  */
      if (current_function_decl != decl)
	/* This is only a declaration.  */;
#endif

      /* We should have abstract copies already and should not generate
	 stray type DIEs in late LTO dumping.  */
      if (! early_dwarf)
	;

      /* If we're emitting a clone, emit info for the abstract instance.  */
      else if (origin || DECL_ORIGIN (decl) != decl)
	dwarf2out_abstract_function (decl: origin
				     ? DECL_ORIGIN (origin)
				     : DECL_ABSTRACT_ORIGIN (decl));

      /* If we're emitting a possibly inlined function emit it as
         abstract instance.  */
      else if (cgraph_function_possibly_inlined_p (decl)
	       && ! DECL_ABSTRACT_P (decl)
	       && ! class_or_namespace_scope_p (context_die)
	       /* dwarf2out_abstract_function won't emit a die if this is just
		  a declaration.  We must avoid setting DECL_ABSTRACT_ORIGIN in
		  that case, because that works only if we have a die.  */
	       && DECL_INITIAL (decl) != NULL_TREE)
	dwarf2out_abstract_function (decl);

      /* Otherwise we're emitting the primary DIE for this decl.  */
      else if (debug_info_level > DINFO_LEVEL_TERSE)
	{
	  /* Before we describe the FUNCTION_DECL itself, make sure that we
	     have its containing type.  */
	  if (!origin)
	    origin = decl_class_context (decl);
	  if (origin != NULL_TREE)
	    gen_type_die (type: origin, context_die);

	  /* And its return type.  */
	  gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);

	  /* And its virtual context.  */
	  if (DECL_VINDEX (decl) != NULL_TREE)
	    gen_type_die (DECL_CONTEXT (decl), context_die);

	  /* Make sure we have a member DIE for decl.  */
	  if (origin != NULL_TREE)
	    gen_type_die_for_member (type: origin, member: decl, context_die);

	  /* And its containing namespace.  */
	  context_die = declare_in_namespace (thing: decl, context_die);
	}

      /* Now output a DIE to represent the function itself.  */
      if (decl)
	gen_subprogram_die (decl, context_die);
      break;

    case TYPE_DECL:
      /* If we are in terse mode, don't generate any DIEs to represent any
	 actual typedefs.  */
      if (debug_info_level <= DINFO_LEVEL_TERSE)
	break;

      /* In the special case of a TYPE_DECL node representing the declaration
	 of some type tag, if the given TYPE_DECL is marked as having been
	 instantiated from some other (original) TYPE_DECL node (e.g. one which
	 was generated within the original definition of an inline function) we
	 used to generate a special (abbreviated) DW_TAG_structure_type,
	 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here.  But nothing
	 should be actually referencing those DIEs, as variable DIEs with that
	 type would be emitted already in the abstract origin, so it was always
	 removed during unused type prunning.  Don't add anything in this
	 case.  */
      if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
	break;

      if (is_redundant_typedef (decl))
	gen_type_die (TREE_TYPE (decl), context_die);
      else
	/* Output a DIE to represent the typedef itself.  */
	gen_typedef_die (decl, context_die);
      break;

    case LABEL_DECL:
      if (debug_info_level >= DINFO_LEVEL_NORMAL)
	gen_label_die (decl, context_die);
      break;

    case VAR_DECL:
    case RESULT_DECL:
      /* If we are in terse mode, don't generate any DIEs to represent any
	 variable declarations or definitions unless it is external.  */
      if (debug_info_level < DINFO_LEVEL_TERSE
	  || (debug_info_level == DINFO_LEVEL_TERSE
	      && !TREE_PUBLIC (decl_or_origin)))
	break;

      if (debug_info_level > DINFO_LEVEL_TERSE)
	{
	  /* Avoid generating stray type DIEs during late dwarf dumping.
	     All types have been dumped early.  */
	  if (early_dwarf
	      /* ??? But in LTRANS we cannot annotate early created variably
		 modified type DIEs without copying them and adjusting all
		 references to them.  Dump them again as happens for inlining
		 which copies both the decl and the types.  */
	      /* ??? And even non-LTO needs to re-visit type DIEs to fill
		 in VLA bound information for example.  */
	      || (decl && variably_modified_type_p (TREE_TYPE (decl),
						    current_function_decl)))
	    {
	      /* Output any DIEs that are needed to specify the type of this data
		 object.  */
	      if (decl_by_reference_p (decl: decl_or_origin))
		gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
	      else
		gen_type_die (TREE_TYPE (decl_or_origin), context_die);
	    }

	  if (early_dwarf)
	    {
	      /* And its containing type.  */
	      class_origin = decl_class_context (decl: decl_or_origin);
	      if (class_origin != NULL_TREE)
		gen_type_die_for_member (type: class_origin, member: decl_or_origin, context_die);

	      /* And its containing namespace.  */
	      context_die = declare_in_namespace (thing: decl_or_origin, context_die);
	    }
	}

      /* Now output the DIE to represent the data object itself.  This gets
	 complicated because of the possibility that the VAR_DECL really
	 represents an inlined instance of a formal parameter for an inline
	 function.  */
      ultimate_origin = decl_ultimate_origin (decl: decl_or_origin);
      if (ultimate_origin != NULL_TREE
	  && TREE_CODE (ultimate_origin) == PARM_DECL)
	gen_formal_parameter_die (node: decl, origin,
				  emit_name_p: true /* Emit name attribute.  */,
				  context_die);
      else
	gen_variable_die (decl, origin, context_die);
      break;

    case FIELD_DECL:
      gcc_assert (ctx != NULL && ctx->struct_type != NULL);
      /* Ignore the nameless fields that are used to skip bits but handle C++
	 anonymous unions and structs.  */
      if (DECL_NAME (decl) != NULL_TREE
	  || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
	  || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
	{
	  gen_type_die (type: member_declared_type (member: decl), context_die);
	  gen_field_die (decl, ctx, context_die);
	}
      break;

    case PARM_DECL:
      /* Avoid generating stray type DIEs during late dwarf dumping.
         All types have been dumped early.  */
      if (early_dwarf
	  /* ??? But in LTRANS we cannot annotate early created variably
	     modified type DIEs without copying them and adjusting all
	     references to them.  Dump them again as happens for inlining
	     which copies both the decl and the types.  */
	  /* ??? And even non-LTO needs to re-visit type DIEs to fill
	     in VLA bound information for example.  */
	  || (decl && variably_modified_type_p (TREE_TYPE (decl),
						current_function_decl)))
	{
	  if (DECL_BY_REFERENCE (decl_or_origin))
	    gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
	  else
	    gen_type_die (TREE_TYPE (decl_or_origin), context_die);
	}
      /* This is the only case with a meaningful return value.  */
      return gen_formal_parameter_die (node: decl, origin,
				       emit_name_p: true /* Emit name attribute.  */,
				       context_die);

    case NAMESPACE_DECL:
      if (dwarf_version >= 3 || !dwarf_strict)
	gen_namespace_die (decl, context_die);
      break;

    case IMPORTED_DECL:
      dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
					   DECL_CONTEXT (decl), context_die);
      break;

    case NAMELIST_DECL:
      gen_namelist_decl (DECL_NAME (decl), context_die,
			 NAMELIST_DECL_ASSOCIATED_DECL (decl));
      break;

    default:
      /* Probably some frontend-internal decl.  Assume we don't care.  */
      gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
      break;
    }

  return NULL;
}
27284 | |
/* Output initial debug information for global DECL.  Called at the
   end of the parsing process.

   This is the initial debug generation process.  As such, the DIEs
   generated may be incomplete.  A later debug generation pass
   (dwarf2out_late_global_decl) will augment the information generated
   in this pass (e.g., with complete location info).  */

static void
dwarf2out_early_global_decl (tree decl)
{
  /* RAII guard: marks this as early dwarf generation for the duration
     of this function.  */
  set_early_dwarf s;

  /* gen_decl_die() will set DECL_ABSTRACT because
     cgraph_function_possibly_inlined_p() returns true.  This is in
     turn will cause DW_AT_inline attributes to be set.

     This happens because at early dwarf generation, there is no
     cgraph information, causing cgraph_function_possibly_inlined_p()
     to return true.  Trick cgraph_function_possibly_inlined_p()
     while we generate dwarf early.  */
  bool save = symtab->global_info_ready;
  symtab->global_info_ready = true;

  /* We don't handle TYPE_DECLs.  If required, they'll be reached via
     other DECLs and they can point to template types or other things
     that dwarf2out can't handle when done via dwarf2out_decl.  */
  if (TREE_CODE (decl) != TYPE_DECL
      && TREE_CODE (decl) != PARM_DECL)
    {
      if (TREE_CODE (decl) == FUNCTION_DECL)
	{
	  /* Several steps below temporarily change current_function_decl;
	     remember it so it can be restored at the end.  */
	  tree save_fndecl = current_function_decl;

	  /* For nested functions, make sure we have DIEs for the parents first
	     so that all nested DIEs are generated at the proper scope in the
	     first shot.  */
	  tree context = decl_function_context (decl);
	  if (context != NULL)
	    {
	      dw_die_ref context_die = lookup_decl_die (decl: context);
	      current_function_decl = context;

	      /* Avoid emitting DIEs multiple times, but still process CONTEXT
		 enough so that it lands in its own context.  This avoids type
		 pruning issues later on.  */
	      if (context_die == NULL || is_declaration_die (die: context_die))
		dwarf2out_early_global_decl (decl: context);
	    }

	  /* Emit an abstract origin of a function first.  This happens
	     with C++ constructor clones for example and makes
	     dwarf2out_abstract_function happy which requires the early
	     DIE of the abstract instance to be present.  */
	  tree origin = DECL_ABSTRACT_ORIGIN (decl);
	  dw_die_ref origin_die;
	  if (origin != NULL
	      /* Do not emit the DIE multiple times but make sure to
	         process it fully here in case we just saw a declaration.  */
	      && ((origin_die = lookup_decl_die (decl: origin)) == NULL
		  || is_declaration_die (die: origin_die)))
	    {
	      current_function_decl = origin;
	      dwarf2out_decl (origin);
	    }

	  /* Emit the DIE for decl but avoid doing that multiple times.  */
	  dw_die_ref old_die;
	  if ((old_die = lookup_decl_die (decl)) == NULL
	      || is_declaration_die (die: old_die))
	    {
	      current_function_decl = decl;
	      dwarf2out_decl (decl);
	    }

	  current_function_decl = save_fndecl;
	}
      else
	dwarf2out_decl (decl);
    }
  symtab->global_info_ready = save;
}
27367 | |
27368 | /* Return whether EXPR is an expression with the following pattern: |
27369 | INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */ |
27370 | |
27371 | static bool |
27372 | is_trivial_indirect_ref (tree expr) |
27373 | { |
27374 | if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF) |
27375 | return false; |
27376 | |
27377 | tree nop = TREE_OPERAND (expr, 0); |
27378 | if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR) |
27379 | return false; |
27380 | |
27381 | tree int_cst = TREE_OPERAND (nop, 0); |
27382 | return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST; |
27383 | } |
27384 | |
/* Output debug information for global decl DECL.  Called from
   toplev.cc after compilation proper has finished.  Augments the
   early-created DIE (if any) with location information that was not
   known during parsing; only VAR_DECLs need this.  */

static void
dwarf2out_late_global_decl (tree decl)
{
  /* Fill-in any location information we were unable to determine
     on the first pass.  */
  if (VAR_P (decl))
    {
      dw_die_ref die = lookup_decl_die (decl);

      /* We may have to generate full debug late for LTO in case debug
         was not enabled at compile-time or the target doesn't support
	 the LTO early debug scheme.  */
      if (! die && in_lto_p
	  /* Function scope variables are emitted when emitting the
	     DIE for the function.  */
	  && ! local_function_static (decl))
	dwarf2out_decl (decl);
      else if (die)
	{
	  /* We get called via the symtab code invoking late_global_decl
	     for symbols that are optimized out.

	     Do not add locations for those, except if they have a
	     DECL_VALUE_EXPR, in which case they are relevant for debuggers.
	     Still don't add a location if the DECL_VALUE_EXPR is not a trivial
	     INDIRECT_REF expression, as this could generate relocations to
	     text symbols in LTO object files, which is invalid.  */
	  varpool_node *node = varpool_node::get (decl);
	  if ((! node || ! node->definition)
	      && ! (DECL_HAS_VALUE_EXPR_P (decl)
		    && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
	    /* Optimized out: at best emit a constant value attribute.  */
	    tree_add_const_value_attribute_for_decl (var_die: die, decl);
	  else
	    add_location_or_const_value_attribute (die, decl, cache_p: false);
	}
    }
}
27425 | |
27426 | /* Output debug information for type decl DECL. Called from toplev.cc |
27427 | and from language front ends (to record built-in types). */ |
27428 | static void |
27429 | dwarf2out_type_decl (tree decl, int local) |
27430 | { |
27431 | if (!local) |
27432 | { |
27433 | set_early_dwarf s; |
27434 | dwarf2out_decl (decl); |
27435 | } |
27436 | } |
27437 | |
27438 | /* Output debug information for imported module or decl DECL. |
27439 | NAME is non-NULL name in the lexical block if the decl has been renamed. |
27440 | LEXICAL_BLOCK is the lexical block (which TREE_CODE is a BLOCK) |
27441 | that DECL belongs to. |
27442 | LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */ |
static void
dwarf2out_imported_module_or_decl_1 (tree decl,
				     tree name,
				     tree lexical_block,
				     dw_die_ref lexical_block_die)
{
  expanded_location xloc;
  dw_die_ref imported_die = NULL;
  dw_die_ref at_import_die;

  /* For an IMPORTED_DECL wrapper, take the source location from the
     wrapper itself and continue with the associated (imported) decl.  */
  if (TREE_CODE (decl) == IMPORTED_DECL)
    {
      xloc = expand_location (DECL_SOURCE_LOCATION (decl));
      decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
      gcc_assert (decl);
    }
  else
    xloc = expand_location (input_location);

  /* Find (or force creation of) the DIE that DW_AT_import will point at.  */
  if (TREE_CODE (decl) == TYPE_DECL)
    {
      at_import_die = force_type_die (TREE_TYPE (decl));
      /* For namespace N { typedef void T; } using N::T; base_type_die
	 returns NULL, but DW_TAG_imported_declaration requires
	 the DW_AT_import tag.  Force creation of DW_TAG_typedef.  */
      if (!at_import_die)
	{
	  gcc_assert (TREE_CODE (decl) == TYPE_DECL);
	  gen_typedef_die (decl, context_die: get_context_die (DECL_CONTEXT (decl)));
	  at_import_die = lookup_type_die (TREE_TYPE (decl));
	  gcc_assert (at_import_die);
	}
    }
  else
    {
      at_import_die = lookup_decl_die (decl);
      if (!at_import_die)
	{
	  /* If we're trying to avoid duplicate debug info, we may not have
	     emitted the member decl for this field.  Emit it now.  */
	  if (TREE_CODE (decl) == FIELD_DECL)
	    {
	      tree type = DECL_CONTEXT (decl);

	      if (TYPE_CONTEXT (type)
		  && TYPE_P (TYPE_CONTEXT (type))
		  && !should_emit_struct_debug (TYPE_CONTEXT (type),
						usage: DINFO_USAGE_DIR_USE))
		return;
	      gen_type_die_for_member (type, member: decl,
				       context_die: get_context_die (TYPE_CONTEXT (type)));
	    }
	  if (TREE_CODE (decl) == CONST_DECL)
	    {
	      /* Individual enumerators of an enum type do not get output here
		 (see gen_decl_die), so we cannot call force_decl_die.  */
	      if (!is_fortran () && !is_ada () && !is_dlang ())
		return;
	    }
	  if (TREE_CODE (decl) == NAMELIST_DECL)
	    at_import_die = gen_namelist_decl (DECL_NAME (decl),
					       get_context_die (DECL_CONTEXT (decl)),
					       NULL_TREE);
	  else
	    at_import_die = force_decl_die (decl);
	}
    }

  /* Pick the tag: a whole-namespace import is DW_TAG_imported_module
     (DWARF 3 and later only); anything else is
     DW_TAG_imported_declaration.  */
  if (TREE_CODE (decl) == NAMESPACE_DECL)
    {
      if (dwarf_version >= 3 || !dwarf_strict)
	imported_die = new_die (tag_value: DW_TAG_imported_module,
				parent_die: lexical_block_die,
				t: lexical_block);
      else
	return;
    }
  else
    imported_die = new_die (tag_value: DW_TAG_imported_declaration,
			    parent_die: lexical_block_die,
			    t: lexical_block);

  /* Record where the import occurred and, for renames, the new name.  */
  add_AT_file (die: imported_die, attr_kind: DW_AT_decl_file, fd: lookup_filename (xloc.file));
  add_AT_unsigned (die: imported_die, attr_kind: DW_AT_decl_line, unsigned_val: xloc.line);
  if (debug_column_info && xloc.column)
    add_AT_unsigned (die: imported_die, attr_kind: DW_AT_decl_column, unsigned_val: xloc.column);
  if (name)
    add_AT_string (die: imported_die, attr_kind: DW_AT_name,
		   IDENTIFIER_POINTER (name));
  add_AT_die_ref (die: imported_die, attr_kind: DW_AT_import, targ_die: at_import_die);
}
27534 | |
27535 | /* Output debug information for imported module or decl DECL. |
27536 | NAME is non-NULL name in context if the decl has been renamed. |
27537 | CHILD is true if decl is one of the renamed decls as part of |
27538 | importing whole module. |
27539 | IMPLICIT is set if this hook is called for an implicit import |
27540 | such as inline namespace. */ |
27541 | |
static void
dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
				   bool child, bool implicit)
{
  /* dw_die_ref at_import_die;  */
  dw_die_ref scope_die;

  if (debug_info_level <= DINFO_LEVEL_TERSE)
    return;

  gcc_assert (decl);

  /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
     should be enough, for DWARF4 and older even if we emit as extension
     DW_AT_export_symbols add the implicit DW_TAG_imported_module anyway
     for the benefit of consumers unaware of DW_AT_export_symbols.  */
  if (implicit
      && dwarf_version >= 5
      && lang_hooks.decls.decl_dwarf_attribute (decl,
						DW_AT_export_symbols) == 1)
    return;

  /* Imports are emitted during the early (pre-optimization) phase.  */
  set_early_dwarf s;

  /* To emit DW_TAG_imported_module or DW_TAG_imported_decl, we need two DIEs.
     We need decl DIE for reference and scope die.  First, get DIE for the decl
     itself.  */

  /* Get the scope die for decl context.  Use comp_unit_die for global module
     or decl.  If die is not found for non globals, force new die.  */
  if (context
      && TYPE_P (context)
      && !should_emit_struct_debug (type: context, usage: DINFO_USAGE_DIR_USE))
    return;

  scope_die = get_context_die (context);

  if (child)
    {
      /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
	 there is nothing we can do, here.  */
      if (dwarf_version < 3 && dwarf_strict)
	return;

      /* CHILD means DECL is one of the renamed decls belonging to a module
	 import emitted just before us; nest it under that
	 DW_TAG_imported_module, which must be the scope's last child.  */
      gcc_assert (scope_die->die_child);
      gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
      gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
      scope_die = scope_die->die_child;
    }

  /* OK, now we have DIEs for decl as well as scope.  Emit imported die.  */
  dwarf2out_imported_module_or_decl_1 (decl, name, lexical_block: context, lexical_block_die: scope_die);
}
27595 | |
27596 | /* Output debug information for namelists. */ |
27597 | |
static dw_die_ref
gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
{
  dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
  tree value;
  unsigned i;

  if (debug_info_level <= DINFO_LEVEL_TERSE)
    return NULL;

  gcc_assert (scope_die != NULL);
  nml_die = new_die (tag_value: DW_TAG_namelist, parent_die: scope_die, NULL);
  add_AT_string (die: nml_die, attr_kind: DW_AT_name, IDENTIFIER_POINTER (name));

  /* If there are no item_decls, we have a nondefining namelist, e.g.
     with USE association; hence, set DW_AT_declaration.  */
  if (item_decls == NULL_TREE)
    {
      add_AT_flag (die: nml_die, attr_kind: DW_AT_declaration, flag: 1);
      return nml_die;
    }

  /* Emit one DW_TAG_namelist_item child per member, each referencing
     the member's own DIE (created on demand if not yet emitted).  */
  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
    {
      nml_item_ref_die = lookup_decl_die (decl: value);
      if (!nml_item_ref_die)
	nml_item_ref_die = force_decl_die (decl: value);

      nml_item_die = new_die (tag_value: DW_TAG_namelist_item, parent_die: nml_die, NULL);
      add_AT_die_ref (die: nml_item_die, attr_kind: DW_AT_namelist_item, targ_die: nml_item_ref_die);
    }
  return nml_die;
}
27631 | |
27632 | |
27633 | /* Write the debugging output for DECL and return the DIE. */ |
27634 | |
static void
dwarf2out_decl (tree decl)
{
  dw_die_ref context_die = comp_unit_die ();

  /* First pick the parent DIE (or bail out entirely) depending on the
     kind of declaration; the default parent is the compilation unit.  */
  switch (TREE_CODE (decl))
    {
    case ERROR_MARK:
      return;

    case FUNCTION_DECL:
      /* If we're a nested function, initially use a parent of NULL; if we're
	 a plain function, this will be fixed up in decls_for_scope.  If
	 we're a method, it will be ignored, since we already have a DIE.
	 Avoid doing this late though since clones of class methods may
	 otherwise end up in limbo and create type DIEs late.  */
      if (early_dwarf
	  && decl_function_context (decl)
	  /* But if we're in terse mode, we don't care about scope.  */
	  && debug_info_level > DINFO_LEVEL_TERSE)
	context_die = NULL;
      break;

    case VAR_DECL:
      /* For local statics lookup proper context die.  */
      if (local_function_static (decl))
	context_die = lookup_decl_die (DECL_CONTEXT (decl));

      /* If we are in terse mode, don't generate any DIEs to represent any
	 variable declarations or definitions unless it is external.  */
      if (debug_info_level < DINFO_LEVEL_TERSE
	  || (debug_info_level == DINFO_LEVEL_TERSE
	      && !TREE_PUBLIC (decl)))
	return;
      break;

    case CONST_DECL:
      if (debug_info_level <= DINFO_LEVEL_TERSE)
	return;
      /* Only a few front ends (Fortran, Ada, D) emit standalone
	 CONST_DECL debug info.  */
      if (!is_fortran () && !is_ada () && !is_dlang ())
	return;
      if (TREE_STATIC (decl) && decl_function_context (decl))
	context_die = lookup_decl_die (DECL_CONTEXT (decl));
      break;

    case NAMESPACE_DECL:
    case IMPORTED_DECL:
      if (debug_info_level <= DINFO_LEVEL_TERSE)
	return;
      /* Namespaces and imports are emitted at most once.  */
      if (lookup_decl_die (decl) != NULL)
	return;
      break;

    case TYPE_DECL:
      /* Don't emit stubs for types unless they are needed by other DIEs.  */
      if (TYPE_DECL_SUPPRESS_DEBUG (decl))
	return;

      /* Don't bother trying to generate any DIEs to represent any of the
	 normal built-in types for the language we are compiling.  */
      if (DECL_IS_UNDECLARED_BUILTIN (decl))
	return;

      /* If we are in terse mode, don't generate any DIEs for types.  */
      if (debug_info_level <= DINFO_LEVEL_TERSE)
	return;

      /* If we're a function-scope tag, initially use a parent of NULL;
	 this will be fixed up in decls_for_scope.  */
      if (decl_function_context (decl))
	context_die = NULL;

      break;

    case NAMELIST_DECL:
      break;

    default:
      return;
    }

  gen_decl_die (decl, NULL, NULL, context_die);

  /* With internal checking enabled, validate the freshly built DIE.  */
  if (flag_checking)
    {
      dw_die_ref die = lookup_decl_die (decl);
      if (die)
	check_die (die);
    }
}
27725 | |
27726 | /* Write the debugging output for DECL. */ |
27727 | |
27728 | static void |
27729 | dwarf2out_function_decl (tree decl) |
27730 | { |
27731 | dwarf2out_decl (decl); |
27732 | call_arg_locations = NULL; |
27733 | call_arg_loc_last = NULL; |
27734 | call_site_count = -1; |
27735 | tail_call_site_count = -1; |
27736 | decl_loc_table->empty (); |
27737 | cached_dw_loc_list_table->empty (); |
27738 | } |
27739 | |
27740 | /* Output a marker (i.e. a label) for the beginning of the generated code for |
27741 | a lexical block. */ |
27742 | |
static void
dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
		       unsigned int blocknum)
{
  /* Make sure the label lands in the section currently holding this
     function's code (hot/cold parts may differ).  */
  switch_to_section (current_function_section ());
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
}
27750 | |
27751 | /* Output a marker (i.e. a label) for the end of the generated code for a |
27752 | lexical block. */ |
27753 | |
static void
dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
{
  /* Make sure the label lands in the section currently holding this
     function's code (hot/cold parts may differ).  */
  switch_to_section (current_function_section ());
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
}
27760 | |
27761 | /* Returns true if it is appropriate not to emit any debugging |
27762 | information for BLOCK, because it doesn't contain any instructions. |
27763 | |
27764 | Don't allow this for blocks with nested functions or local classes |
27765 | as we would end up with orphans, and in the presence of scheduling |
27766 | we may end up calling them anyway. */ |
27767 | |
27768 | static bool |
27769 | dwarf2out_ignore_block (const_tree block) |
27770 | { |
27771 | tree decl; |
27772 | unsigned int i; |
27773 | |
27774 | for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl)) |
27775 | if (TREE_CODE (decl) == FUNCTION_DECL |
27776 | || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl))) |
27777 | return false; |
27778 | for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++) |
27779 | { |
27780 | decl = BLOCK_NONLOCALIZED_VAR (block, i); |
27781 | if (TREE_CODE (decl) == FUNCTION_DECL |
27782 | || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl))) |
27783 | return false; |
27784 | } |
27785 | |
27786 | return true; |
27787 | } |
27788 | |
27789 | /* Hash table routines for file_hash. */ |
27790 | |
27791 | bool |
27792 | dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2) |
27793 | { |
27794 | return filename_cmp (s1: p1->key, s2: p2) == 0; |
27795 | } |
27796 | |
hashval_t
dwarf_file_hasher::hash (dwarf_file_data *p)
{
  /* Hash on the untranslated file name, which is the table's key.  */
  return htab_hash_string (p->key);
}
27802 | |
27803 | /* Lookup FILE_NAME (in the list of filenames that we know about here in |
27804 | dwarf2out.cc) and return its "index". The index of each (known) filename is |
27805 | just a unique number which is associated with only that one filename. We |
27806 | need such numbers for the sake of generating labels (in the .debug_sfnames |
27807 | section) and references to those files numbers (in the .debug_srcinfo |
27808 | and .debug_macinfo sections). If the filename given as an argument is not |
27809 | found in our current list, add it to the list and assign it the next |
27810 | available unique index number. */ |
27811 | |
27812 | static struct dwarf_file_data * |
27813 | lookup_filename (const char *file_name) |
27814 | { |
27815 | struct dwarf_file_data * created; |
27816 | |
27817 | if (!file_name) |
27818 | return NULL; |
27819 | |
27820 | if (!file_name[0]) |
27821 | file_name = "<stdin>" ; |
27822 | |
27823 | dwarf_file_data **slot |
27824 | = file_table->find_slot_with_hash (comparable: file_name, hash: htab_hash_string (file_name), |
27825 | insert: INSERT); |
27826 | if (*slot) |
27827 | return *slot; |
27828 | |
27829 | created = ggc_alloc<dwarf_file_data> (); |
27830 | created->key = file_name; |
27831 | created->filename = remap_debug_filename (file_name); |
27832 | created->emitted_number = 0; |
27833 | *slot = created; |
27834 | return created; |
27835 | } |
27836 | |
27837 | /* If the assembler will construct the file table, then translate the compiler |
27838 | internal file table number into the assembler file table number, and emit |
27839 | a .file directive if we haven't already emitted one yet. The file table |
27840 | numbers are different because we prune debug info for unused variables and |
27841 | types, which may include filenames. */ |
27842 | |
27843 | static int |
27844 | maybe_emit_file (struct dwarf_file_data * fd) |
27845 | { |
27846 | if (! fd->emitted_number) |
27847 | { |
27848 | if (last_emitted_file) |
27849 | fd->emitted_number = last_emitted_file->emitted_number + 1; |
27850 | else |
27851 | fd->emitted_number = 1; |
27852 | last_emitted_file = fd; |
27853 | |
27854 | if (output_asm_line_debug_info ()) |
27855 | { |
27856 | fprintf (stream: asm_out_file, format: "\t.file %u " , fd->emitted_number); |
27857 | output_quoted_string (asm_out_file, fd->filename); |
27858 | fputc (c: '\n', stream: asm_out_file); |
27859 | } |
27860 | } |
27861 | |
27862 | return fd->emitted_number; |
27863 | } |
27864 | |
27865 | /* Schedule generation of a DW_AT_const_value attribute to DIE. |
27866 | That generation should happen after function debug info has been |
27867 | generated. The value of the attribute is the constant value of ARG. */ |
27868 | |
27869 | static void |
27870 | append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg) |
27871 | { |
27872 | die_arg_entry entry; |
27873 | |
27874 | if (!die || !arg) |
27875 | return; |
27876 | |
27877 | gcc_assert (early_dwarf); |
27878 | |
27879 | if (!tmpl_value_parm_die_table) |
27880 | vec_alloc (v&: tmpl_value_parm_die_table, nelems: 32); |
27881 | |
27882 | entry.die = die; |
27883 | entry.arg = arg; |
27884 | vec_safe_push (v&: tmpl_value_parm_die_table, obj: entry); |
27885 | } |
27886 | |
27887 | /* Return TRUE if T is an instance of generic type, FALSE |
27888 | otherwise. */ |
27889 | |
27890 | static bool |
27891 | generic_type_p (tree t) |
27892 | { |
27893 | if (t == NULL_TREE || !TYPE_P (t)) |
27894 | return false; |
27895 | return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE; |
27896 | } |
27897 | |
27898 | /* Schedule the generation of the generic parameter dies for the |
27899 | instance of generic type T. The proper generation itself is later |
27900 | done by gen_scheduled_generic_parms_dies. */ |
27901 | |
27902 | static void |
27903 | schedule_generic_params_dies_gen (tree t) |
27904 | { |
27905 | if (!generic_type_p (t)) |
27906 | return; |
27907 | |
27908 | gcc_assert (early_dwarf); |
27909 | |
27910 | if (!generic_type_instances) |
27911 | vec_alloc (v&: generic_type_instances, nelems: 256); |
27912 | |
27913 | vec_safe_push (v&: generic_type_instances, obj: t); |
27914 | } |
27915 | |
27916 | /* Add a DW_AT_const_value attribute to DIEs that were scheduled |
27917 | by append_entry_to_tmpl_value_parm_die_table. This function must |
27918 | be called after function DIEs have been generated. */ |
27919 | |
static void
gen_remaining_tmpl_value_param_die_attribute (void)
{
  if (tmpl_value_parm_die_table)
    {
      unsigned i, j;
      die_arg_entry *e;

      /* We do this in two phases - first get the cases we can
	 handle during early-finish, preserving those we cannot
	 (containing symbolic constants where we don't yet know
	 whether we are going to output the referenced symbols).
	 For those we try again at late-finish.  */
      j = 0;
      FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
	{
	  if (!e->die->removed
	      && !tree_add_const_value_attribute (die: e->die, t: e->arg))
	    {
	      /* Constant emission failed; at late-finish, fall back to a
		 DWARF location expression when the standard permits it.  */
	      dw_loc_descr_ref loc = NULL;
	      if (! early_dwarf
		  && (dwarf_version >= 5 || !dwarf_strict))
		loc = loc_descriptor_from_tree (loc: e->arg, want_address: 2, NULL);
	      if (loc)
		add_AT_loc (die: e->die, attr_kind: DW_AT_location, loc);
	      else
		/* Still unresolved: keep the entry (compacting the vec in
		   place) so the late-finish pass retries it.  */
		(*tmpl_value_parm_die_table)[j++] = *e;
	    }
	}
      tmpl_value_parm_die_table->truncate (size: j);
    }
}
27952 | |
27953 | /* Generate generic parameters DIEs for instances of generic types |
27954 | that have been previously scheduled by |
27955 | schedule_generic_params_dies_gen. This function must be called |
27956 | after all the types of the CU have been laid out. */ |
27957 | |
27958 | static void |
27959 | gen_scheduled_generic_parms_dies (void) |
27960 | { |
27961 | unsigned i; |
27962 | tree t; |
27963 | |
27964 | if (!generic_type_instances) |
27965 | return; |
27966 | |
27967 | FOR_EACH_VEC_ELT (*generic_type_instances, i, t) |
27968 | if (COMPLETE_TYPE_P (t)) |
27969 | gen_generic_params_dies (t); |
27970 | |
27971 | generic_type_instances = NULL; |
27972 | } |
27973 | |
27974 | |
27975 | /* Replace DW_AT_name for the decl with name. */ |
27976 | |
27977 | static void |
27978 | dwarf2out_set_name (tree decl, tree name) |
27979 | { |
27980 | dw_die_ref die; |
27981 | dw_attr_node *attr; |
27982 | const char *dname; |
27983 | |
27984 | die = TYPE_SYMTAB_DIE (decl); |
27985 | if (!die) |
27986 | return; |
27987 | |
27988 | dname = dwarf2_name (decl: name, scope: 0); |
27989 | if (!dname) |
27990 | return; |
27991 | |
27992 | attr = get_AT (die, attr_kind: DW_AT_name); |
27993 | if (attr) |
27994 | { |
27995 | struct indirect_string_node *node; |
27996 | |
27997 | node = find_AT_string (str: dname); |
27998 | /* replace the string. */ |
27999 | attr->dw_attr_val.v.val_str = node; |
28000 | } |
28001 | |
28002 | else |
28003 | add_name_attribute (die, name_string: dname); |
28004 | } |
28005 | |
28006 | /* True if before or during processing of the first function being emitted. */ |
28007 | static bool in_first_function_p = true; |
28008 | /* True if loc_note during dwarf2out_var_location call might still be |
28009 | before first real instruction at address equal to .Ltext0. */ |
28010 | static bool maybe_at_text_label_p = true; |
28011 | /* One above highest N where .LVLN label might be equal to .Ltext0 label. */ |
28012 | static unsigned int first_loclabel_num_not_at_text_label; |
28013 | |
28014 | /* Look ahead for a real insn. */ |
28015 | |
28016 | static rtx_insn * |
28017 | dwarf2out_next_real_insn (rtx_insn *loc_note) |
28018 | { |
28019 | rtx_insn *next_real = NEXT_INSN (insn: loc_note); |
28020 | |
28021 | while (next_real) |
28022 | if (INSN_P (next_real)) |
28023 | break; |
28024 | else |
28025 | next_real = NEXT_INSN (insn: next_real); |
28026 | |
28027 | return next_real; |
28028 | } |
28029 | |
28030 | /* Called by the final INSN scan whenever we see a var location. We |
28031 | use it to drop labels in the right places, and throw the location in |
28032 | our lookup table. */ |
28033 | |
static void
dwarf2out_var_location (rtx_insn *loc_note)
{
  char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
  struct var_loc_node *newloc;
  rtx_insn *next_real;
  rtx_insn *call_insn = NULL;
  /* Per-function emission state, reset implicitly via
     last_var_location_insn / in_cold_section_p comparisons below.  */
  static const char *last_label;
  static const char *last_postcall_label;
  static bool last_in_cold_section_p;
  static rtx_insn *expected_next_loc_note;
  tree decl;
  bool var_loc_p;
  var_loc_view view = 0;

  /* Non-note case: we were handed a real insn, which here can only be
     a call that needs a call-site label.  */
  if (!NOTE_P (loc_note))
    {
      if (CALL_P (loc_note))
	{
	  maybe_reset_location_view (insn: loc_note, table: cur_line_info_table);
	  call_site_count++;
	  if (SIBLING_CALL_P (loc_note))
	    tail_call_site_count++;
	  if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
	    {
	      /* Call with argument-location note: emit a call-site label.  */
	      call_insn = loc_note;
	      loc_note = NULL;
	      var_loc_p = false;

	      next_real = dwarf2out_next_real_insn (loc_note: call_insn);
	      cached_next_real_insn = NULL;
	      goto create_label;
	    }
	  if (optimize == 0 && !flag_var_tracking)
	    {
	      /* When the var-tracking pass is not running, there is no note
		 for indirect calls whose target is compile-time known.  In this
		 case, process such calls specifically so that we generate call
		 sites for them anyway.  */
	      rtx x = PATTERN (insn: loc_note);
	      if (GET_CODE (x) == PARALLEL)
		x = XVECEXP (x, 0, 0);
	      if (GET_CODE (x) == SET)
		x = SET_SRC (x);
	      if (GET_CODE (x) == CALL)
		x = XEXP (x, 0);
	      if (!MEM_P (x)
		  || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
		  || !SYMBOL_REF_DECL (XEXP (x, 0))
		  || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
		      != FUNCTION_DECL))
		{
		  call_insn = loc_note;
		  loc_note = NULL;
		  var_loc_p = false;

		  next_real = dwarf2out_next_real_insn (loc_note: call_insn);
		  cached_next_real_insn = NULL;
		  goto create_label;
		}
	    }
	}
      else if (!debug_variable_location_views)
	gcc_unreachable ();
      else
	maybe_reset_location_view (insn: loc_note, table: cur_line_info_table);

      return;
    }

  var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
  if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
    return;

  /* Optimize processing a large consecutive sequence of location
     notes so we don't spend too much time in next_real_insn.  If the
     next insn is another location note, remember the next_real_insn
     calculation for next time.  */
  next_real = cached_next_real_insn;
  if (next_real)
    {
      /* Cache is only valid for the note we predicted last time.  */
      if (expected_next_loc_note != loc_note)
	next_real = NULL;
    }

  if (! next_real)
    next_real = dwarf2out_next_real_insn (loc_note);

  if (next_real)
    {
      rtx_insn *next_note = NEXT_INSN (insn: loc_note);
      while (next_note != next_real)
	{
	  if (! next_note->deleted ()
	      && NOTE_P (next_note)
	      && NOTE_KIND (next_note) == NOTE_INSN_VAR_LOCATION)
	    break;
	  next_note = NEXT_INSN (insn: next_note);
	}

      if (next_note == next_real)
	cached_next_real_insn = NULL;
      else
	{
	  expected_next_loc_note = next_note;
	  cached_next_real_insn = next_real;
	}
    }
  else
    cached_next_real_insn = NULL;

  /* If there are no instructions which would be affected by this note,
     don't do anything.  */
  if (var_loc_p
      && next_real == NULL_RTX
      && !NOTE_DURING_CALL_P (loc_note))
    return;

create_label:

  if (next_real == NULL_RTX)
    next_real = get_last_insn ();

  /* If there were any real insns between note we processed last time
     and this note (or if it is the first note), clear
     last_{,postcall_}label so that they are not reused this time.  */
  if (last_var_location_insn == NULL_RTX
      || last_var_location_insn != next_real
      || last_in_cold_section_p != in_cold_section_p)
    {
      last_label = NULL;
      last_postcall_label = NULL;
    }

  if (var_loc_p)
    {
      /* Record the variable's new location in its per-decl list.  */
      const char *label
	= NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
      view = cur_line_info_table->view;
      decl = NOTE_VAR_LOCATION_DECL (loc_note);
      newloc = add_var_loc_to_decl (decl, loc_note, label, view);
      if (newloc == NULL)
	return;
    }
  else
    {
      decl = NULL_TREE;
      newloc = NULL;
    }

  /* If there were no real insns between note we processed last time
     and this note, use the label we emitted last time.  Otherwise
     create a new label and emit it.  */
  if (last_label == NULL)
    {
      ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL" , loclabel_num);
      ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL" , loclabel_num);
      loclabel_num++;
      last_label = ggc_strdup (loclabel);
      /* See if loclabel might be equal to .Ltext0.  If yes,
	 bump first_loclabel_num_not_at_text_label.  */
      if (!have_multiple_function_sections
	  && in_first_function_p
	  && maybe_at_text_label_p)
	{
	  static rtx_insn *last_start;
	  rtx_insn *insn;
	  /* Scan backwards for a real insn with non-zero length; if one
	     exists, the label cannot coincide with .Ltext0.  */
	  for (insn = loc_note; insn; insn = previous_insn (insn))
	    if (insn == last_start)
	      break;
	    else if (!NONDEBUG_INSN_P (insn))
	      continue;
	    else
	      {
		rtx body = PATTERN (insn);
		if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
		  continue;
		/* Inline asm could occupy zero bytes.  */
		else if (GET_CODE (body) == ASM_INPUT
			 || asm_noperands (body) >= 0)
		  continue;
#ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h.  */
		else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
		  continue;
#endif
		else
		  {
		    /* Assume insn has non-zero length.  */
		    maybe_at_text_label_p = false;
		    break;
		  }
	      }
	  if (maybe_at_text_label_p)
	    {
	      /* Remember where this scan ended so the next one can stop
		 early.  */
	      last_start = loc_note;
	      first_loclabel_num_not_at_text_label = loclabel_num;
	    }
	}
    }

  /* Exactly one of loc_note / call_insn is live at this point.  */
  gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
	      || (loc_note != NULL_RTX && call_insn == NULL_RTX));

  if (!var_loc_p)
    {
      /* Call-site case: append a call_arg_loc_node describing this call
	 to the per-function list.  */
      struct call_arg_loc_node *ca_loc
	= ggc_cleared_alloc<call_arg_loc_node> ();
      rtx_insn *prev = call_insn;

      ca_loc->call_arg_loc_note
	= find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
      ca_loc->next = NULL;
      ca_loc->label = last_label;
      gcc_assert (prev
		  && (CALL_P (prev)
		      || (NONJUMP_INSN_P (prev)
			  && GET_CODE (PATTERN (prev)) == SEQUENCE
			  && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
      if (!CALL_P (prev))
	prev = as_a <rtx_sequence *> (p: PATTERN (insn: prev))->insn (index: 0);
      ca_loc->tail_call_p = SIBLING_CALL_P (prev);

      /* Look for a SYMBOL_REF in the "prev" instruction.  */
      rtx x = get_call_rtx_from (prev);
      if (x)
	{
	  /* Try to get the call symbol, if any.  */
	  if (MEM_P (XEXP (x, 0)))
	    x = XEXP (x, 0);
	  /* First, look for a memory access to a symbol_ref.  */
	  if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	      && SYMBOL_REF_DECL (XEXP (x, 0))
	      && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
	    ca_loc->symbol_ref = XEXP (x, 0);
	  /* Otherwise, look at a compile-time known user-level function
	     declaration.  */
	  else if (MEM_P (x)
		   && MEM_EXPR (x)
		   && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
	    ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
	}

      ca_loc->block = insn_scope (prev);
      if (call_arg_locations)
	call_arg_loc_last->next = ca_loc;
      else
	call_arg_locations = ca_loc;
      call_arg_loc_last = ca_loc;
    }
  else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
    {
      /* Ordinary variable-location note: bind it to the label just
	 emitted (or reused).  */
      newloc->label = last_label;
      newloc->view = view;
    }
  else
    {
      /* Location during a call: bind to a "label minus one" address so
	 it takes effect after the call returns.  */
      if (!last_postcall_label)
	{
	  sprintf (s: loclabel, format: "%s-1" , last_label);
	  last_postcall_label = ggc_strdup (loclabel);
	}
      newloc->label = last_postcall_label;
      /* ??? This view is at last_label, not last_label-1, but we
	 could only assume view at last_label-1 is zero if we could
	 assume calls always have length greater than one.  This is
	 probably true in general, though there might be a rare
	 exception to this rule, e.g. if a call insn is optimized out
	 by target magic.  Then, even the -1 in the label will be
	 wrong, which might invalidate the range.  Anyway, using view,
	 though technically possibly incorrect, will work as far as
	 ranges go: since L-1 is in the middle of the call insn,
	 (L-1).0 and (L-1).V shouldn't make any difference, and having
	 the loclist entry refer to the .loc entry might be useful, so
	 leave it like this.  */
      newloc->view = view;
    }

  /* With -dA, annotate the assembly with the variable and its new
     location (or RESET when the location is unknown).  */
  if (var_loc_p && flag_debug_asm)
    {
      const char *name, *sep, *patstr;
      if (decl && DECL_NAME (decl))
	name = IDENTIFIER_POINTER (DECL_NAME (decl));
      else
	name = "" ;
      if (NOTE_VAR_LOCATION_LOC (loc_note))
	{
	  sep = " => " ;
	  patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
	}
      else
	{
	  sep = " " ;
	  patstr = "RESET" ;
	}
      fprintf (stream: asm_out_file, format: "\t%s DEBUG %s%s%s\n" , ASM_COMMENT_START,
	       name, sep, patstr);
    }

  /* Remember where we stopped so the next note can decide whether the
     previous labels are reusable.  */
  last_var_location_insn = next_real;
  last_in_cold_section_p = in_cold_section_p;
}
28335 | |
/* Check whether BLOCK, a lexical block, is nested within OUTER, or is
   OUTER itself.  If BOTHWAYS, check not only that BLOCK can reach
   OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
   path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
   BLOCK_FRAGMENT_ORIGIN links.

   Returns true if BLOCK is (transitively) contained in OUTER, false
   otherwise.  */
static bool
block_within_block_p (tree block, tree outer, bool bothways)
{
  if (block == outer)
    return true;

  /* Quickly check that OUTER is up BLOCK's supercontext chain.  */
  for (tree context = BLOCK_SUPERCONTEXT (block);
       context != outer;
       context = BLOCK_SUPERCONTEXT (context))
    /* A NULL or non-BLOCK supercontext (e.g. a FUNCTION_DECL at the
       top of the chain) means we ran off the block tree without ever
       meeting OUTER.  */
    if (!context || TREE_CODE (context) != BLOCK)
      return false;

  if (!bothways)
    return true;

  /* Now check that each block is actually referenced by its
     parent.  */
  for (tree context = BLOCK_SUPERCONTEXT (block); ;
       context = BLOCK_SUPERCONTEXT (context))
    {
      if (BLOCK_FRAGMENT_ORIGIN (context))
	{
	  /* A fragment never has subblocks of its own; chase the
	     origin, which carries the real subblock list.  */
	  gcc_assert (!BLOCK_SUBBLOCKS (context));
	  context = BLOCK_FRAGMENT_ORIGIN (context);
	}
      /* Verify BLOCK is on CONTEXT's subblock chain; hitting the end
	 of the chain (NULL) means the downward link is missing.  */
      for (tree sub = BLOCK_SUBBLOCKS (context);
	   sub != block;
	   sub = BLOCK_CHAIN (sub))
	if (!sub)
	  return false;
      if (context == outer)
	return true;
      else
	block = context;
    }
}
28378 | |
/* Called during final while assembling the marker of the entry point
   for an inlined function.  BLOCK is the inlined-function scope.
   Records an inline_entry_data for BLOCK and emits the corresponding
   inline-entry label into the assembly stream.  */

static void
dwarf2out_inline_entry (tree block)
{
  gcc_assert (debug_inline_points);

  /* If we can't represent it, don't bother.  DW_AT_entry_pc on an
     inlined_subroutine needs DWARF 3 or a relaxed -gdwarf-strict.  */
  if (!(dwarf_version >= 3 || !dwarf_strict))
    return;

  /* BLOCK must have an abstract origin that is a declaration, i.e. it
     must really be an inlined-function scope.  */
  gcc_assert (DECL_P (block_ultimate_origin (block)));

  /* Sanity check the block tree.  This would catch a case in which
     BLOCK got removed from the tree reachable from the outermost
     lexical block, but got retained in markers.  It would still link
     back to its parents, but some ancestor would be missing a link
     down the path to the sub BLOCK.  If the block got removed, its
     BLOCK_NUMBER will not be a usable value.  */
  if (flag_checking)
    gcc_assert (block_within_block_p (block,
				      DECL_INITIAL (current_function_decl),
				      true));

  gcc_assert (inlined_function_outer_scope_p (block));
  /* The entry marker must be seen before the block's DIE exists.  */
  gcc_assert (!lookup_block_die (block));

  if (BLOCK_FRAGMENT_ORIGIN (block))
    block = BLOCK_FRAGMENT_ORIGIN (block);
  /* Can the entry point ever not be at the beginning of an
     unfragmented lexical block?  */
  else if (!(BLOCK_FRAGMENT_CHAIN (block)
	     || (cur_line_info_table
		 && !ZERO_VIEW_P (cur_line_info_table->view))))
    return;

  /* Create the block -> entry-data map lazily, on first use.  */
  if (!inline_entry_data_table)
    inline_entry_data_table
      = hash_table<inline_entry_data_hasher>::create_ggc (n: 10);


  inline_entry_data **iedp
    = inline_entry_data_table->find_slot_with_hash (comparable: block,
						    hash: htab_hash_pointer (block),
						    insert: INSERT);
  if (*iedp)
    /* ??? Ideally, we'd record all entry points for the same inlined
       function (some may have been duplicated by e.g. unrolling), but
       we have no way to represent that ATM.  */
    return;

  /* Remember the label (prefix + block number) and the current view
     so the DW_AT_entry_pc/DW_AT_GNU_entry_view can be emitted later.  */
  inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
  ied->block = block;
  ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
  ied->label_num = BLOCK_NUMBER (block);
  if (cur_line_info_table)
    ied->view = cur_line_info_table->view;

  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_INLINE_ENTRY_LABEL,
			  BLOCK_NUMBER (block));
}
28441 | |
28442 | /* Called from finalize_size_functions for size functions so that their body |
28443 | can be encoded in the debug info to describe the layout of variable-length |
28444 | structures. */ |
28445 | |
28446 | static void |
28447 | dwarf2out_size_function (tree decl) |
28448 | { |
28449 | set_early_dwarf s; |
28450 | function_to_dwarf_procedure (fndecl: decl); |
28451 | } |
28452 | |
28453 | /* Note in one location list that text section has changed. */ |
28454 | |
28455 | int |
28456 | var_location_switch_text_section_1 (var_loc_list **slot, void *) |
28457 | { |
28458 | var_loc_list *list = *slot; |
28459 | if (list->first) |
28460 | list->last_before_switch |
28461 | = list->last->next ? list->last->next : list->last; |
28462 | return 1; |
28463 | } |
28464 | |
28465 | /* Note in all location lists that text section has changed. */ |
28466 | |
28467 | static void |
28468 | var_location_switch_text_section (void) |
28469 | { |
28470 | if (decl_loc_table == NULL) |
28471 | return; |
28472 | |
28473 | decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL); |
28474 | } |
28475 | |
28476 | /* Create a new line number table. */ |
28477 | |
28478 | static dw_line_info_table * |
28479 | new_line_info_table (void) |
28480 | { |
28481 | dw_line_info_table *table; |
28482 | |
28483 | table = ggc_cleared_alloc<dw_line_info_table> (); |
28484 | table->file_num = 1; |
28485 | table->line_num = 1; |
28486 | table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START; |
28487 | FORCE_RESET_NEXT_VIEW (table->view); |
28488 | table->symviews_since_reset = 0; |
28489 | |
28490 | return table; |
28491 | } |
28492 | |
28493 | /* Lookup the "current" table into which we emit line info, so |
28494 | that we don't have to do it for every source line. */ |
28495 | |
28496 | static void |
28497 | set_cur_line_info_table (section *sec) |
28498 | { |
28499 | dw_line_info_table *table; |
28500 | |
28501 | if (sec == text_section) |
28502 | table = text_section_line_info; |
28503 | else if (sec == cold_text_section) |
28504 | { |
28505 | table = cold_text_section_line_info; |
28506 | if (!table) |
28507 | { |
28508 | cold_text_section_line_info = table = new_line_info_table (); |
28509 | table->end_label = cold_end_label; |
28510 | } |
28511 | } |
28512 | else |
28513 | { |
28514 | const char *end_label; |
28515 | |
28516 | if (crtl->has_bb_partition) |
28517 | { |
28518 | if (in_cold_section_p) |
28519 | end_label = crtl->subsections.cold_section_end_label; |
28520 | else |
28521 | end_label = crtl->subsections.hot_section_end_label; |
28522 | } |
28523 | else |
28524 | { |
28525 | char label[MAX_ARTIFICIAL_LABEL_BYTES]; |
28526 | ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL, |
28527 | current_function_funcdef_no); |
28528 | end_label = ggc_strdup (label); |
28529 | } |
28530 | |
28531 | table = new_line_info_table (); |
28532 | table->end_label = end_label; |
28533 | |
28534 | vec_safe_push (v&: separate_line_info, obj: table); |
28535 | } |
28536 | |
28537 | if (output_asm_line_debug_info ()) |
28538 | table->is_stmt = (cur_line_info_table |
28539 | ? cur_line_info_table->is_stmt |
28540 | : DWARF_LINE_DEFAULT_IS_STMT_START); |
28541 | cur_line_info_table = table; |
28542 | } |
28543 | |
28544 | |
28545 | /* We need to reset the locations at the beginning of each |
28546 | function. We can't do this in the end_function hook, because the |
28547 | declarations that use the locations won't have been output when |
28548 | that hook is called. Also compute have_multiple_function_sections here. */ |
28549 | |
28550 | static void |
28551 | dwarf2out_begin_function (tree fun) |
28552 | { |
28553 | section *sec = function_section (fun); |
28554 | |
28555 | if (sec != text_section) |
28556 | have_multiple_function_sections = true; |
28557 | |
28558 | if (crtl->has_bb_partition && !cold_text_section) |
28559 | { |
28560 | gcc_assert (current_function_decl == fun); |
28561 | cold_text_section = unlikely_text_section (); |
28562 | switch_to_section (cold_text_section); |
28563 | ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label); |
28564 | switch_to_section (sec); |
28565 | } |
28566 | |
28567 | call_site_count = 0; |
28568 | tail_call_site_count = 0; |
28569 | |
28570 | set_cur_line_info_table (sec); |
28571 | FORCE_RESET_NEXT_VIEW (cur_line_info_table->view); |
28572 | } |
28573 | |
28574 | /* Helper function of dwarf2out_end_function, called only after emitting |
28575 | the very first function into assembly. Check if some .debug_loc range |
28576 | might end with a .LVL* label that could be equal to .Ltext0. |
28577 | In that case we must force using absolute addresses in .debug_loc ranges, |
28578 | because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for |
28579 | .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc |
28580 | list terminator. |
28581 | Set have_multiple_function_sections to true in that case and |
28582 | terminate htab traversal. */ |
28583 | |
28584 | int |
28585 | find_empty_loc_ranges_at_text_label (var_loc_list **slot, int) |
28586 | { |
28587 | var_loc_list *entry = *slot; |
28588 | struct var_loc_node *node; |
28589 | |
28590 | node = entry->first; |
28591 | if (node && node->next && node->next->label) |
28592 | { |
28593 | unsigned int i; |
28594 | const char *label = node->next->label; |
28595 | char loclabel[MAX_ARTIFICIAL_LABEL_BYTES]; |
28596 | |
28597 | for (i = 0; i < first_loclabel_num_not_at_text_label; i++) |
28598 | { |
28599 | ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL" , i); |
28600 | if (strcmp (s1: label, s2: loclabel) == 0) |
28601 | { |
28602 | have_multiple_function_sections = true; |
28603 | return 0; |
28604 | } |
28605 | } |
28606 | } |
28607 | return 1; |
28608 | } |
28609 | |
28610 | /* Hook called after emitting a function into assembly. |
28611 | This does something only for the very first function emitted. */ |
28612 | |
28613 | static void |
28614 | dwarf2out_end_function (unsigned int) |
28615 | { |
28616 | if (in_first_function_p |
28617 | && !have_multiple_function_sections |
28618 | && first_loclabel_num_not_at_text_label |
28619 | && decl_loc_table) |
28620 | decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (argument: 0); |
28621 | in_first_function_p = false; |
28622 | maybe_at_text_label_p = false; |
28623 | } |
28624 | |
/* Temporary holder for dwarf2out_register_main_translation_unit.  Used to let
   front-ends register a translation unit even before dwarf2out_init is
   called.  dwarf2out_init later consumes this to associate the unit with
   comp_unit_die.  */
static tree main_translation_unit = NULL_TREE;
28629 | |
28630 | /* Hook called by front-ends after they built their main translation unit. |
28631 | Associate comp_unit_die to UNIT. */ |
28632 | |
28633 | static void |
28634 | dwarf2out_register_main_translation_unit (tree unit) |
28635 | { |
28636 | gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL |
28637 | && main_translation_unit == NULL_TREE); |
28638 | main_translation_unit = unit; |
28639 | /* If dwarf2out_init has not been called yet, it will perform the association |
28640 | itself looking at main_translation_unit. */ |
28641 | if (decl_die_table != NULL) |
28642 | equate_decl_number_to_die (decl: unit, decl_die: comp_unit_die ()); |
28643 | } |
28644 | |
28645 | /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */ |
28646 | |
28647 | static void |
28648 | push_dw_line_info_entry (dw_line_info_table *table, |
28649 | enum dw_line_info_opcode opcode, unsigned int val) |
28650 | { |
28651 | dw_line_info_entry e; |
28652 | e.opcode = opcode; |
28653 | e.val = val; |
28654 | vec_safe_push (v&: table->entries, obj: e); |
28655 | } |
28656 | |
/* Output a label to mark the beginning of a source code line entry
   and record information relating to this source line, in
   'line_info_table' for later output of the .debug_line section.  */
/* ??? The discriminator parameter ought to be unsigned.  */

static void
dwarf2out_source_line (unsigned int line, unsigned int column,
		       const char *filename,
		       int discriminator, bool is_stmt)
{
  unsigned int file_num;
  dw_line_info_table *table;
  /* Monotonically increasing id assigned to assembler-computed
     location views; static so it is unique across all calls.  */
  static var_loc_view lvugid;

  /* 'line_info_table' information gathering is not needed when the debug
     info level is set to the lowest value.  Also, the current DWARF-based
     debug formats do not use this info.  */
  if (debug_info_level < DINFO_LEVEL_TERSE || !dwarf_debuginfo_p ())
    return;

  table = cur_line_info_table;

  /* Line 0 means "no source line"; we emit no row for it, but when
     views are assembler-computed we still have to account for the
     view the omitted row would have taken.  */
  if (line == 0)
    {
      if (debug_variable_location_views
	  && output_asm_line_debug_info ()
	  && table && !RESETTING_VIEW_P (table->view))
	{
	  /* If we're using the assembler to compute view numbers, we
	     can't issue a .loc directive for line zero, so we can't
	     get a view number at this point.  We might attempt to
	     compute it from the previous view, or equate it to a
	     subsequent view (though it might not be there!), but
	     since we're omitting the line number entry, we might as
	     well omit the view number as well.  That means pretending
	     it's a view number zero, which might very well turn out
	     to be correct.  ??? Extend the assembler so that the
	     compiler could emit e.g. ".locview .LVU#", to output a
	     view without changing line number information.  We'd then
	     have to count it in symviews_since_reset; when it's omitted,
	     it doesn't count.  */
	  if (!zero_view_p)
	    zero_view_p = BITMAP_GGC_ALLOC ();
	  bitmap_set_bit (zero_view_p, table->view);
	  if (flag_debug_asm)
	    {
	      char label[MAX_ARTIFICIAL_LABEL_BYTES];
	      ASM_GENERATE_INTERNAL_LABEL (label, "LVU" , table->view);
	      fprintf (stream: asm_out_file, format: "\t%s line 0, omitted view " ,
		       ASM_COMMENT_START);
	      assemble_name (asm_out_file, label);
	      putc (c: '\n', stream: asm_out_file);
	    }
	  /* Consume the view id and move on to a fresh one.  */
	  table->view = ++lvugid;
	}
      return;
    }

  /* The discriminator column was added in dwarf4.  Simplify the below
     by simply removing it if we're not supposed to output it.  */
  if (dwarf_version < 4 && dwarf_strict)
    discriminator = 0;

  if (!debug_column_info)
    column = 0;

  file_num = maybe_emit_file (fd: lookup_filename (file_name: filename));

  /* ??? TODO: Elide duplicate line number entries.  Traditionally,
     the debugger has used the second (possibly duplicate) line number
     at the beginning of the function to mark the end of the prologue.
     We could eliminate any other duplicates within the function.  For
     Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
     that second line number entry.  */
  /* Recall that this end-of-prologue indication is *not* the same thing
     as the end_prologue debug hook.  The NOTE_INSN_PROLOGUE_END note,
     to which the hook corresponds, follows the last insn that was
     emitted by gen_prologue.  What we need is to precede the first insn
     that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
     insn that corresponds to something the user wrote.  These may be
     very different locations once scheduling is enabled.  */

  /* Deliberately disabled (if (0 && ...)) — see the TODO above.  */
  if (0 && file_num == table->file_num
      && line == table->line_num
      && column == table->column_num
      && discriminator == table->discrim_num
      && is_stmt == table->is_stmt)
    return;

  switch_to_section (current_function_section ());

  /* If requested, emit something human-readable.  */
  if (flag_debug_asm)
    {
      if (debug_column_info)
	fprintf (stream: asm_out_file, format: "\t%s %s:%d:%d\n" , ASM_COMMENT_START,
		 filename, line, column);
      else
	fprintf (stream: asm_out_file, format: "\t%s %s:%d\n" , ASM_COMMENT_START,
		 filename, line);
    }

  if (output_asm_line_debug_info ())
    {
      /* Emit the .loc directive understood by GNU as.  */
      /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
	 file_num, line, is_stmt, discriminator */
      fputs (s: "\t.loc " , stream: asm_out_file);
      fprint_ul (asm_out_file, file_num);
      putc (c: ' ', stream: asm_out_file);
      fprint_ul (asm_out_file, line);
      putc (c: ' ', stream: asm_out_file);
      fprint_ul (asm_out_file, column);

      /* Only emit is_stmt when it changes from the table's state,
	 and only if the assembler supports it.  */
      if (is_stmt != table->is_stmt)
	{
#if HAVE_GAS_LOC_STMT
	  fputs (s: " is_stmt " , stream: asm_out_file);
	  putc (c: is_stmt ? '1' : '0', stream: asm_out_file);
#endif
	}
      if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
	{
	  gcc_assert (discriminator > 0);
	  fputs (s: " discriminator " , stream: asm_out_file);
	  fprint_ul (asm_out_file, (unsigned long) discriminator);
	}
      if (debug_variable_location_views)
	{
	  if (!RESETTING_VIEW_P (table->view))
	    {
	      /* Track the longest run of symbolic views since the last
		 reset, an upper bound used when sizing view encodings.  */
	      table->symviews_since_reset++;
	      if (table->symviews_since_reset > symview_upper_bound)
		symview_upper_bound = table->symviews_since_reset;
	      /* When we're using the assembler to compute view
		 numbers, we output symbolic labels after "view" in
		 .loc directives, and the assembler will set them for
		 us, so that we can refer to the view numbers in
		 location lists.  The only exceptions are when we know
		 a view will be zero: "-0" is a forced reset, used
		 e.g. in the beginning of functions, whereas "0" tells
		 the assembler to check that there was a PC change
		 since the previous view, in a way that implicitly
		 resets the next view.  */
	      fputs (s: " view " , stream: asm_out_file);
	      char label[MAX_ARTIFICIAL_LABEL_BYTES];
	      ASM_GENERATE_INTERNAL_LABEL (label, "LVU" , table->view);
	      assemble_name (asm_out_file, label);
	      table->view = ++lvugid;
	    }
	  else
	    {
	      table->symviews_since_reset = 0;
	      if (FORCE_RESETTING_VIEW_P (table->view))
		fputs (s: " view -0" , stream: asm_out_file);
	      else
		fputs (s: " view 0" , stream: asm_out_file);
	      /* Mark the present view as a zero view.  Earlier debug
		 binds may have already added its id to loclists to be
		 emitted later, so we can't reuse the id for something
		 else.  However, it's good to know whether a view is
		 known to be zero, because then we may be able to
		 optimize out locviews that are all zeros, so take
		 note of it in zero_view_p.  */
	      if (!zero_view_p)
		zero_view_p = BITMAP_GGC_ALLOC ();
	      bitmap_set_bit (zero_view_p, lvugid);
	      table->view = ++lvugid;
	    }
	}
      putc (c: '\n', stream: asm_out_file);
    }
  else
    {
      /* Compiler-computed line table: emit a code label here and record
	 opcodes describing this row for later .debug_line output.  */
      unsigned int label_num = ++line_info_label_num;

      targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);

      if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
	push_dw_line_info_entry (table, opcode: LI_adv_address, val: label_num);
      else
	push_dw_line_info_entry (table, opcode: LI_set_address, val: label_num);
      if (debug_variable_location_views)
	{
	  bool resetting = FORCE_RESETTING_VIEW_P (table->view);
	  if (resetting)
	    table->view = 0;

	  if (flag_debug_asm)
	    fprintf (stream: asm_out_file, format: "\t%s view %s%d\n" ,
		     ASM_COMMENT_START,
		     resetting ? "-" : "" ,
		     table->view);

	  table->view++;
	}
      /* Only record the columns that changed since the last row.  */
      if (file_num != table->file_num)
	push_dw_line_info_entry (table, opcode: LI_set_file, val: file_num);
      if (discriminator != table->discrim_num)
	push_dw_line_info_entry (table, opcode: LI_set_discriminator, val: discriminator);
      if (is_stmt != table->is_stmt)
	push_dw_line_info_entry (table, opcode: LI_negate_stmt, val: 0);
      push_dw_line_info_entry (table, opcode: LI_set_line, val: line);
      if (debug_column_info)
	push_dw_line_info_entry (table, opcode: LI_set_column, val: column);
    }

  /* Remember the state of this row so the next call can elide
     unchanged columns.  */
  table->file_num = file_num;
  table->line_num = line;
  table->column_num = column;
  table->discrim_num = discriminator;
  table->is_stmt = is_stmt;
  table->in_use = true;
}
28871 | |
28872 | /* Record a source file location for a DECL_IGNORED_P function. */ |
28873 | |
28874 | static void |
28875 | dwarf2out_set_ignored_loc (unsigned int line, unsigned int column, |
28876 | const char *filename) |
28877 | { |
28878 | dw_fde_ref fde = cfun->fde; |
28879 | |
28880 | fde->ignored_debug = false; |
28881 | set_cur_line_info_table (function_section (fde->decl)); |
28882 | |
28883 | dwarf2out_source_line (line, column, filename, discriminator: 0, is_stmt: true); |
28884 | } |
28885 | |
28886 | /* Record the beginning of a new source file. */ |
28887 | |
28888 | static void |
28889 | dwarf2out_start_source_file (unsigned int lineno, const char *filename) |
28890 | { |
28891 | if (debug_info_level >= DINFO_LEVEL_VERBOSE) |
28892 | { |
28893 | macinfo_entry e; |
28894 | e.code = DW_MACINFO_start_file; |
28895 | e.lineno = lineno; |
28896 | e.info = ggc_strdup (filename); |
28897 | vec_safe_push (v&: macinfo_table, obj: e); |
28898 | } |
28899 | } |
28900 | |
28901 | /* Record the end of a source file. */ |
28902 | |
28903 | static void |
28904 | dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED) |
28905 | { |
28906 | if (debug_info_level >= DINFO_LEVEL_VERBOSE) |
28907 | { |
28908 | macinfo_entry e; |
28909 | e.code = DW_MACINFO_end_file; |
28910 | e.lineno = lineno; |
28911 | e.info = NULL; |
28912 | vec_safe_push (v&: macinfo_table, obj: e); |
28913 | } |
28914 | } |
28915 | |
28916 | /* Called from debug_define in toplev.cc. The `buffer' parameter contains |
28917 | the tail part of the directive line, i.e. the part which is past the |
28918 | initial whitespace, #, whitespace, directive-name, whitespace part. */ |
28919 | |
28920 | static void |
28921 | dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED, |
28922 | const char *buffer ATTRIBUTE_UNUSED) |
28923 | { |
28924 | if (debug_info_level >= DINFO_LEVEL_VERBOSE) |
28925 | { |
28926 | macinfo_entry e; |
28927 | /* Insert a dummy first entry to be able to optimize the whole |
28928 | predefined macro block using DW_MACRO_import. */ |
28929 | if (macinfo_table->is_empty () && lineno <= 1) |
28930 | { |
28931 | e.code = 0; |
28932 | e.lineno = 0; |
28933 | e.info = NULL; |
28934 | vec_safe_push (v&: macinfo_table, obj: e); |
28935 | } |
28936 | e.code = DW_MACINFO_define; |
28937 | e.lineno = lineno; |
28938 | e.info = ggc_strdup (buffer); |
28939 | vec_safe_push (v&: macinfo_table, obj: e); |
28940 | } |
28941 | } |
28942 | |
28943 | /* Called from debug_undef in toplev.cc. The `buffer' parameter contains |
28944 | the tail part of the directive line, i.e. the part which is past the |
28945 | initial whitespace, #, whitespace, directive-name, whitespace part. */ |
28946 | |
28947 | static void |
28948 | dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED, |
28949 | const char *buffer ATTRIBUTE_UNUSED) |
28950 | { |
28951 | if (debug_info_level >= DINFO_LEVEL_VERBOSE) |
28952 | { |
28953 | macinfo_entry e; |
28954 | /* Insert a dummy first entry to be able to optimize the whole |
28955 | predefined macro block using DW_MACRO_import. */ |
28956 | if (macinfo_table->is_empty () && lineno <= 1) |
28957 | { |
28958 | e.code = 0; |
28959 | e.lineno = 0; |
28960 | e.info = NULL; |
28961 | vec_safe_push (v&: macinfo_table, obj: e); |
28962 | } |
28963 | e.code = DW_MACINFO_undef; |
28964 | e.lineno = lineno; |
28965 | e.info = ggc_strdup (buffer); |
28966 | vec_safe_push (v&: macinfo_table, obj: e); |
28967 | } |
28968 | } |
28969 | |
/* Helpers to manipulate the hash table of macinfo entries, keyed by
   the macro define/undef string.  (NOTE(review): the previous comment
   said "hash table of CUs", which appears to be a copy-paste.)  */

struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
{
  /* Hash an entry by its macro string.  */
  static inline hashval_t hash (const macinfo_entry *);
  /* Entries compare equal when their macro strings match.  */
  static inline bool equal (const macinfo_entry *, const macinfo_entry *);
};
28977 | |
28978 | inline hashval_t |
28979 | macinfo_entry_hasher::hash (const macinfo_entry *entry) |
28980 | { |
28981 | return htab_hash_string (entry->info); |
28982 | } |
28983 | |
28984 | inline bool |
28985 | macinfo_entry_hasher::equal (const macinfo_entry *entry1, |
28986 | const macinfo_entry *entry2) |
28987 | { |
28988 | return !strcmp (s1: entry1->info, s2: entry2->info); |
28989 | } |
28990 | |
/* Hash table of macinfo entries, used to share define/undef runs.  */
typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28992 | |
/* Output a single .debug_macinfo entry REF.  For define/undef entries
   with long strings, may rewrite REF's code to an indirect-string form
   (strp/strx) and recurse once to emit it that way.  */

static void
output_macinfo_op (macinfo_entry *ref)
{
  int file_num;
  size_t len;
  struct indirect_string_node *node;
  char label[MAX_ARTIFICIAL_LABEL_BYTES];
  struct dwarf_file_data *fd;

  switch (ref->code)
    {
    case DW_MACINFO_start_file:
      /* Make sure the file has a number in the line table, then emit
	 opcode, including line, and file number.  */
      fd = lookup_filename (file_name: ref->info);
      file_num = maybe_emit_file (fd);
      dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file" );
      dw2_asm_output_data_uleb128 (ref->lineno,
				   "Included from line number "
				   HOST_WIDE_INT_PRINT_UNSIGNED,
				   ref->lineno);
      dw2_asm_output_data_uleb128 (file_num, "file %s" , ref->info);
      break;
    case DW_MACINFO_end_file:
      dw2_asm_output_data (1, DW_MACINFO_end_file, "End file" );
      break;
    case DW_MACINFO_define:
    case DW_MACINFO_undef:
      len = strlen (s: ref->info) + 1;
      /* For long strings, prefer an indirect form (strx with split
	 debug info and DWARF 5, strp otherwise) when the target's
	 .debug_str section can merge strings; rewrite the code and
	 re-dispatch through a recursive call.  */
      if ((!dwarf_strict || dwarf_version >= 5)
	  && len > (size_t) dwarf_offset_size
	  && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
	  && (debug_str_section->common.flags & SECTION_MERGE) != 0)
	{
	  if (dwarf_split_debug_info && dwarf_version >= 5)
	    ref->code = ref->code == DW_MACINFO_define
			? DW_MACRO_define_strx : DW_MACRO_undef_strx;
	  else
	    ref->code = ref->code == DW_MACINFO_define
			? DW_MACRO_define_strp : DW_MACRO_undef_strp;
	  output_macinfo_op (ref);
	  return;
	}
      /* Inline-string form: opcode, line number, then the string.  */
      dw2_asm_output_data (1, ref->code,
			   ref->code == DW_MACINFO_define
			   ? "Define macro" : "Undefine macro" );
      dw2_asm_output_data_uleb128 (ref->lineno,
				   "At line number "
				   HOST_WIDE_INT_PRINT_UNSIGNED,
				   ref->lineno);
      dw2_asm_output_nstring (ref->info, -1, "The macro" );
      break;
    case DW_MACRO_define_strp:
      dw2_asm_output_data (1, ref->code, "Define macro strp" );
      goto do_DW_MACRO_define_strpx;
    case DW_MACRO_undef_strp:
      dw2_asm_output_data (1, ref->code, "Undefine macro strp" );
      goto do_DW_MACRO_define_strpx;
    case DW_MACRO_define_strx:
      dw2_asm_output_data (1, ref->code, "Define macro strx" );
      goto do_DW_MACRO_define_strpx;
    case DW_MACRO_undef_strx:
      dw2_asm_output_data (1, ref->code, "Undefine macro strx" );
      /* FALLTHRU */
    do_DW_MACRO_define_strpx:
      /* NB: dwarf2out_finish performs:
	   1. save_macinfo_strings
	   2. hash table traverse of index_string
	   3. output_macinfo -> output_macinfo_op
	   4. output_indirect_strings
	     -> hash table traverse of output_index_string

	 When output_macinfo_op is called, all index strings have been
	 added to hash table by save_macinfo_strings and we can't pass
	 INSERT to find_slot_with_hash which may expand hash table, even
	 if no insertion is needed, and change hash table traverse order
	 between index_string and output_index_string.  */
      node = find_AT_string (str: ref->info, insert: NO_INSERT);
      gcc_assert (node
		  && (node->form == DW_FORM_strp
		      || node->form == dwarf_FORM (DW_FORM_strx)));
      dw2_asm_output_data_uleb128 (ref->lineno,
				   "At line number "
				   HOST_WIDE_INT_PRINT_UNSIGNED,
				   ref->lineno);
      /* strp emits an offset into .debug_str, strx an index.  */
      if (node->form == DW_FORM_strp)
	dw2_asm_output_offset (dwarf_offset_size, node->label,
			       debug_str_section, "The macro: \"%s\"" ,
			       ref->info);
      else
	dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"" ,
				     ref->info);
      break;
    case DW_MACRO_import:
      /* For imports, lineno holds the label number of the imported
	 comdat macro section, not a source line.  */
      dw2_asm_output_data (1, ref->code, "Import" );
      ASM_GENERATE_INTERNAL_LABEL (label,
				   DEBUG_MACRO_SECTION_LABEL,
				   ref->lineno + macinfo_label_base);
      dw2_asm_output_offset (dwarf_offset_size, label, NULL, NULL);
      break;
    default:
      fprintf (stream: asm_out_file, format: "%s unrecognized macinfo code %lu\n" ,
	       ASM_COMMENT_START, (unsigned long) ref->code);
      break;
    }
}
29099 | |
/* Attempt to make a sequence of define/undef macinfo ops shareable with
   other compilation unit .debug_macinfo sections.  IDX is the first
   index of a define/undef, return the number of ops that should be
   emitted in a comdat .debug_macinfo section and emit
   a DW_MACRO_import entry referencing it.
   If the define/undef entry should be emitted normally, return 0.

   FILES is the stack of currently open DW_MACINFO_start_file entries;
   MACINFO_HTAB maps group names to the first DW_MACRO_import entry
   emitted for each comdat group, so duplicates reuse the same group.  */

static unsigned
optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
			macinfo_hash_type **macinfo_htab)
{
  macinfo_entry *first, *second, *cur, *inc;
  /* Large enough to print any HOST_WIDE_INT in decimal.  */
  char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
  unsigned char checksum[16];
  struct md5_ctx ctx;
  char *grp_name, *tail;
  const char *base;
  unsigned int i, count, encoded_filename_len, linebuf_len;
  macinfo_entry **slot;

  first = &(*macinfo_table)[idx];
  second = &(*macinfo_table)[idx + 1];

  /* Optimize only if there are at least two consecutive define/undef ops,
     and either all of them are before first DW_MACINFO_start_file
     with lineno {0,1} (i.e. predefined macro block), or all of them are
     in some included header file.  */
  if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
    return 0;
  if (vec_safe_is_empty (v: files))
    {
      if (first->lineno > 1 || second->lineno > 1)
	return 0;
    }
  else if (first->lineno == 0)
    return 0;

  /* Find the last define/undef entry that can be grouped together
     with first and at the same time compute md5 checksum of their
     codes, linenumbers and strings.  */
  md5_init_ctx (ctx: &ctx);
  for (i = idx; macinfo_table->iterate (ix: i, ptr: &cur); i++)
    if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
      break;
    else if (vec_safe_is_empty (v: files) && cur->lineno > 1)
      break;
    else
      {
	unsigned char code = cur->code;
	md5_process_bytes (buffer: &code, len: 1, ctx: &ctx);
	checksum_uleb128 (value: cur->lineno, ctx: &ctx);
	md5_process_bytes (buffer: cur->info, len: strlen (s: cur->info) + 1, ctx: &ctx);
      }
  md5_finish_ctx (ctx: &ctx, resbuf: checksum);
  /* Number of consecutive define/undef ops in the group.  */
  count = i - idx;

  /* From the containing include filename (if any) pick up just
     usable characters from its basename.  */
  if (vec_safe_is_empty (v: files))
    base = "" ;
  else
    base = lbasename (files->last ().info);
  for (encoded_filename_len = 0, i = 0; base[i]; i++)
    if (ISIDNUM (base[i]) || base[i] == '.')
      encoded_filename_len++;
  /* Count . at the end.  */
  if (encoded_filename_len)
    encoded_filename_len++;

  sprintf (s: linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
  linebuf_len = strlen (s: linebuf);

  /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
  grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
			 + 16 * 2 + 1);
  memcpy (dest: grp_name, dwarf_offset_size == 4 ? "wm4." : "wm8." , n: 4);
  tail = grp_name + 4;
  if (encoded_filename_len)
    {
      for (i = 0; base[i]; i++)
	if (ISIDNUM (base[i]) || base[i] == '.')
	  *tail++ = base[i];
      *tail++ = '.';
    }
  memcpy (dest: tail, src: linebuf, n: linebuf_len);
  tail += linebuf_len;
  *tail++ = '.';
  /* Append the md5 checksum as 32 lowercase hex digits; the final
     sprintf also NUL-terminates grp_name.  */
  for (i = 0; i < 16; i++)
    sprintf (s: tail + i * 2, format: "%02x" , checksum[i] & 0xff);

  /* Construct a macinfo_entry for DW_MACRO_import
     in the empty vector entry before the first define/undef.  */
  inc = &(*macinfo_table)[idx - 1];
  inc->code = DW_MACRO_import;
  inc->lineno = 0;
  inc->info = ggc_strdup (grp_name);
  if (!*macinfo_htab)
    *macinfo_htab = new macinfo_hash_type (10);
  /* Avoid emitting duplicates.  */
  slot = (*macinfo_htab)->find_slot (value: inc, insert: INSERT);
  if (*slot != NULL)
    {
      inc->code = 0;
      inc->info = NULL;
      /* If such an entry has been used before, just emit
	 a DW_MACRO_import op.  */
      inc = *slot;
      output_macinfo_op (ref: inc);
      /* And clear all macinfo_entry in the range to avoid emitting them
	 in the second pass.  */
      for (i = idx; macinfo_table->iterate (ix: i, ptr: &cur) && i < idx + count; i++)
	{
	  cur->code = 0;
	  cur->info = NULL;
	}
    }
  else
    {
      *slot = inc;
      /* lineno doubles as the label number for the comdat section;
	 see the DW_MACRO_import handling in output_macinfo.  */
      inc->lineno = (*macinfo_htab)->elements ();
      output_macinfo_op (ref: inc);
    }
  return count;
}
29224 | |
29225 | /* Save any strings needed by the macinfo table in the debug str |
29226 | table. All strings must be collected into the table by the time |
29227 | index_string is called. */ |
29228 | |
29229 | static void |
29230 | save_macinfo_strings (void) |
29231 | { |
29232 | unsigned len; |
29233 | unsigned i; |
29234 | macinfo_entry *ref; |
29235 | |
29236 | for (i = 0; macinfo_table && macinfo_table->iterate (ix: i, ptr: &ref); i++) |
29237 | { |
29238 | switch (ref->code) |
29239 | { |
29240 | /* Match the logic in output_macinfo_op to decide on |
29241 | indirect strings. */ |
29242 | case DW_MACINFO_define: |
29243 | case DW_MACINFO_undef: |
29244 | len = strlen (s: ref->info) + 1; |
29245 | if ((!dwarf_strict || dwarf_version >= 5) |
29246 | && len > (unsigned) dwarf_offset_size |
29247 | && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET |
29248 | && (debug_str_section->common.flags & SECTION_MERGE) != 0) |
29249 | set_indirect_string (find_AT_string (str: ref->info)); |
29250 | break; |
29251 | case DW_MACINFO_start_file: |
29252 | /* -gsplit-dwarf -g3 will also output filename as indirect |
29253 | string. */ |
29254 | if (!dwarf_split_debug_info) |
29255 | break; |
29256 | /* Fall through. */ |
29257 | case DW_MACRO_define_strp: |
29258 | case DW_MACRO_undef_strp: |
29259 | case DW_MACRO_define_strx: |
29260 | case DW_MACRO_undef_strx: |
29261 | set_indirect_string (find_AT_string (str: ref->info)); |
29262 | break; |
29263 | default: |
29264 | break; |
29265 | } |
29266 | } |
29267 | } |
29268 | |
/* Output macinfo section(s).  DEBUG_LINE_LABEL is the label of the
   .debug_line section the macro section header refers to;
   EARLY_LTO_DEBUG is true when emitting the early LTO debug sections
   (their comdat groups are then marked SECTION_EXCLUDE).  */

static void
output_macinfo (const char *debug_line_label, bool early_lto_debug)
{
  unsigned i;
  unsigned long length = vec_safe_length (v: macinfo_table);
  macinfo_entry *ref;
  /* Stack of currently open DW_MACINFO_start_file entries.  */
  vec<macinfo_entry, va_gc> *files = NULL;
  macinfo_hash_type *macinfo_htab = NULL;
  char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];

  /* Nothing was collected, emit nothing.  */
  if (! length)
    return;

  /* output_macinfo* uses these interchangeably.  */
  gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
	      && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
	      && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
	      && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);

  /* AIX Assembler inserts the length, so adjust the reference to match the
     offset expected by debuggers.  */
  strcpy (dest: dl_section_ref, src: debug_line_label);
  if (XCOFF_DEBUGGING_INFO)
    strcat (dest: dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);

  /* For .debug_macro emit the section header.  */
  if (!dwarf_strict || dwarf_version >= 5)
    {
      dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
			   "DWARF macro version number" );
      if (dwarf_offset_size == 8)
	dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present" );
      else
	dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present" );
      dw2_asm_output_offset (dwarf_offset_size, debug_line_label,
			     debug_line_section, NULL);
    }

  /* In the first loop, it emits the primary .debug_macinfo section
     and after each emitted op the macinfo_entry is cleared.
     If a longer range of define/undef ops can be optimized using
     DW_MACRO_import, the DW_MACRO_import op is emitted and kept in
     the vector before the first define/undef in the range and the
     whole range of define/undef ops is not emitted and kept.  */
  for (i = 0; macinfo_table->iterate (ix: i, ptr: &ref); i++)
    {
      switch (ref->code)
	{
	case DW_MACINFO_start_file:
	  vec_safe_push (v&: files, obj: *ref);
	  break;
	case DW_MACINFO_end_file:
	  if (!vec_safe_is_empty (v: files))
	    files->pop ();
	  break;
	case DW_MACINFO_define:
	case DW_MACINFO_undef:
	  /* The comdat sharing optimization needs a preceding cleared
	     entry to hold the DW_MACRO_import op, and only applies
	     either outside all include files or inside exactly one
	     (vec_safe_length != 1 excludes deeper nesting at the
	     top-level test; optimize_macinfo_range re-checks).  */
	  if ((!dwarf_strict || dwarf_version >= 5)
	      && !dwarf_split_debug_info
	      && HAVE_COMDAT_GROUP
	      && vec_safe_length (v: files) != 1
	      && i > 0
	      && i + 1 < length
	      && (*macinfo_table)[i - 1].code == 0)
	    {
	      unsigned count = optimize_macinfo_range (idx: i, files, macinfo_htab: &macinfo_htab);
	      if (count)
		{
		  /* Skip the grouped ops; they will be emitted in the
		     comdat section during the second loop.  */
		  i += count - 1;
		  continue;
		}
	    }
	  break;
	case 0:
	  /* A dummy entry may be inserted at the beginning to be able
	     to optimize the whole block of predefined macros.  */
	  if (i == 0)
	    continue;
	  /* FALLTHRU */
	default:
	  break;
	}
      output_macinfo_op (ref);
      ref->info = NULL;
      ref->code = 0;
    }

  /* No DW_MACRO_import groups were created; nothing left to emit.  */
  if (!macinfo_htab)
    return;

  /* Save the number of transparent includes so we can adjust the
     label number for the fat LTO object DWARF.  */
  unsigned macinfo_label_base_adj = macinfo_htab->elements ();

  delete macinfo_htab;
  macinfo_htab = NULL;

  /* If any DW_MACRO_import were used, on those DW_MACRO_import entries
     terminate the current chain and switch to a new comdat .debug_macinfo
     section and emit the define/undef entries within it.  */
  for (i = 0; macinfo_table->iterate (ix: i, ptr: &ref); i++)
    switch (ref->code)
      {
      case 0:
	continue;
      case DW_MACRO_import:
	{
	  char label[MAX_ARTIFICIAL_LABEL_BYTES];
	  /* The group name was stashed in ref->info by
	     optimize_macinfo_range.  */
	  tree comdat_key = get_identifier (ref->info);
	  /* Terminate the previous .debug_macinfo section.  */
	  dw2_asm_output_data (1, 0, "End compilation unit" );
	  targetm.asm_out.named_section (debug_macinfo_section_name,
					 SECTION_DEBUG
					 | SECTION_LINKONCE
					 | (early_lto_debug
					    ? SECTION_EXCLUDE : 0),
					 comdat_key);
	  ASM_GENERATE_INTERNAL_LABEL (label,
				       DEBUG_MACRO_SECTION_LABEL,
				       ref->lineno + macinfo_label_base);
	  ASM_OUTPUT_LABEL (asm_out_file, label);
	  ref->code = 0;
	  ref->info = NULL;
	  /* Comdat groups carry no lineptr, hence the different flags
	     from the primary section header above.  */
	  dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
			       "DWARF macro version number" );
	  if (dwarf_offset_size == 8)
	    dw2_asm_output_data (1, 1, "Flags: 64-bit" );
	  else
	    dw2_asm_output_data (1, 0, "Flags: 32-bit" );
	}
	break;
      case DW_MACINFO_define:
      case DW_MACINFO_undef:
	output_macinfo_op (ref);
	ref->code = 0;
	ref->info = NULL;
	break;
      default:
	gcc_unreachable ();
      }

  macinfo_label_base += macinfo_label_base_adj;
}
29413 | |
/* As init_sections_and_labels may get called multiple times, have a
   generation count for labels.  */
static unsigned init_sections_and_labels_generation;

/* Initialize the various sections and labels for dwarf output.
   EARLY_LTO_DEBUG selects the LTO variants of the debug sections.
   Returns the generation (zero based number of times function was
   called).  */

static unsigned
init_sections_and_labels (bool early_lto_debug)
{
  if (early_lto_debug)
    {
      /* Early LTO debug sections are marked SECTION_EXCLUDE so the
	 final link does not copy them into the executable.  */
      if (!dwarf_split_debug_info)
	{
	  debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
					    SECTION_DEBUG | SECTION_EXCLUDE,
					    NULL);
	  debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
					      SECTION_DEBUG | SECTION_EXCLUDE,
					      NULL);
	  /* Strict pre-DWARF5 uses .debug_macinfo, otherwise
	     .debug_macro.  */
	  debug_macinfo_section_name
	    = ((dwarf_strict && dwarf_version < 5)
	       ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
	  debug_macinfo_section = get_section (debug_macinfo_section_name,
					       SECTION_DEBUG
					       | SECTION_EXCLUDE, NULL);
	}
      else
	{
	  /* ??? Which of the following do we need early?  */
	  debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
					    SECTION_DEBUG | SECTION_EXCLUDE,
					    NULL);
	  debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
					      SECTION_DEBUG | SECTION_EXCLUDE,
					      NULL);
	  debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
						     SECTION_DEBUG
						     | SECTION_EXCLUDE, NULL);
	  debug_skeleton_abbrev_section
	    = get_section (DEBUG_LTO_ABBREV_SECTION,
			   SECTION_DEBUG | SECTION_EXCLUDE, NULL);
	  ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
				       DEBUG_SKELETON_ABBREV_SECTION_LABEL,
				       init_sections_and_labels_generation);

	  /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
	     stay in the main .o, but the skeleton_line goes into the
	     split off dwo.  */
	  debug_skeleton_line_section
	    = get_section (DEBUG_LTO_LINE_SECTION,
			   SECTION_DEBUG | SECTION_EXCLUDE, NULL);
	  ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
				       DEBUG_SKELETON_LINE_SECTION_LABEL,
				       init_sections_and_labels_generation);
	  debug_str_offsets_section
	    = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
			   SECTION_DEBUG | SECTION_EXCLUDE,
			   NULL);
	  ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
				       DEBUG_SKELETON_INFO_SECTION_LABEL,
				       init_sections_and_labels_generation);
	  debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
					       DEBUG_STR_DWO_SECTION_FLAGS,
					       NULL);
	  debug_macinfo_section_name
	    = ((dwarf_strict && dwarf_version < 5)
	       ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
	  debug_macinfo_section = get_section (debug_macinfo_section_name,
					       SECTION_DEBUG | SECTION_EXCLUDE,
					       NULL);
	}
      /* For macro info and the file table we have to refer to a
	 debug_line section.  */
      debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
					SECTION_DEBUG | SECTION_EXCLUDE, NULL);
      ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
				   DEBUG_LINE_SECTION_LABEL,
				   init_sections_and_labels_generation);

      debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
				       DEBUG_STR_SECTION_FLAGS
				       | SECTION_EXCLUDE, NULL);
      if (!dwarf_split_debug_info)
	debug_line_str_section
	  = get_section (DEBUG_LTO_LINE_STR_SECTION,
			 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
    }
  else
    {
      /* Regular (non-LTO) debug sections.  */
      if (!dwarf_split_debug_info)
	{
	  debug_info_section = get_section (DEBUG_INFO_SECTION,
					    SECTION_DEBUG, NULL);
	  debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
					      SECTION_DEBUG, NULL);
	  /* DWARF5 renamed .debug_loc to .debug_loclists.  */
	  debug_loc_section = get_section (dwarf_version >= 5
					   ? DEBUG_LOCLISTS_SECTION
					   : DEBUG_LOC_SECTION,
					   SECTION_DEBUG, NULL);
	  debug_macinfo_section_name
	    = ((dwarf_strict && dwarf_version < 5)
	       ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
	  debug_macinfo_section = get_section (debug_macinfo_section_name,
					       SECTION_DEBUG, NULL);
	}
      else
	{
	  /* -gsplit-dwarf: .dwo sections get SECTION_EXCLUDE, skeleton
	     sections stay in the main object.  */
	  debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
					    SECTION_DEBUG | SECTION_EXCLUDE,
					    NULL);
	  debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
					      SECTION_DEBUG | SECTION_EXCLUDE,
					      NULL);
	  debug_addr_section = get_section (DEBUG_ADDR_SECTION,
					    SECTION_DEBUG, NULL);
	  debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
						     SECTION_DEBUG, NULL);
	  debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
						       SECTION_DEBUG, NULL);
	  ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
				       DEBUG_SKELETON_ABBREV_SECTION_LABEL,
				       init_sections_and_labels_generation);

	  /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
	     stay in the main .o, but the skeleton_line goes into the
	     split off dwo.  */
	  debug_skeleton_line_section
	    = get_section (DEBUG_DWO_LINE_SECTION,
			   SECTION_DEBUG | SECTION_EXCLUDE, NULL);
	  ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
				       DEBUG_SKELETON_LINE_SECTION_LABEL,
				       init_sections_and_labels_generation);
	  debug_str_offsets_section
	    = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
			   SECTION_DEBUG | SECTION_EXCLUDE, NULL);
	  ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
				       DEBUG_SKELETON_INFO_SECTION_LABEL,
				       init_sections_and_labels_generation);
	  debug_loc_section = get_section (dwarf_version >= 5
					   ? DEBUG_DWO_LOCLISTS_SECTION
					   : DEBUG_DWO_LOC_SECTION,
					   SECTION_DEBUG | SECTION_EXCLUDE,
					   NULL);
	  debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
					       DEBUG_STR_DWO_SECTION_FLAGS,
					       NULL);
	  debug_macinfo_section_name
	    = ((dwarf_strict && dwarf_version < 5)
	       ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
	  debug_macinfo_section = get_section (debug_macinfo_section_name,
					       SECTION_DEBUG | SECTION_EXCLUDE,
					       NULL);
	  if (dwarf_version >= 5)
	    debug_ranges_dwo_section
	      = get_section (DEBUG_DWO_RNGLISTS_SECTION,
			     SECTION_DEBUG | SECTION_EXCLUDE, NULL);
	}
      debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
					   SECTION_DEBUG, NULL);
      debug_line_section = get_section (DEBUG_LINE_SECTION,
					SECTION_DEBUG, NULL);
      debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
					    SECTION_DEBUG, NULL);
      debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
					    SECTION_DEBUG, NULL);
      debug_str_section = get_section (DEBUG_STR_SECTION,
				       DEBUG_STR_SECTION_FLAGS, NULL);
      if ((!dwarf_split_debug_info && !output_asm_line_debug_info ())
	  || asm_outputs_debug_line_str ())
	debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
					      DEBUG_STR_SECTION_FLAGS, NULL);

      debug_ranges_section = get_section (dwarf_version >= 5
					  ? DEBUG_RNGLISTS_SECTION
					  : DEBUG_RANGES_SECTION,
					  SECTION_DEBUG, NULL);
      debug_frame_section = get_section (DEBUG_FRAME_SECTION,
					 SECTION_DEBUG, NULL);
    }

  /* Generate fresh per-generation labels so repeated calls (e.g. for
     fat LTO objects) do not collide.  */
  ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
			       DEBUG_ABBREV_SECTION_LABEL,
			       init_sections_and_labels_generation);
  ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
			       DEBUG_INFO_SECTION_LABEL,
			       init_sections_and_labels_generation);
  info_section_emitted = false;
  ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
			       DEBUG_LINE_SECTION_LABEL,
			       init_sections_and_labels_generation);
  /* There are up to 6 unique ranges labels per generation.
     See also output_rnglists.  */
  ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
			       DEBUG_RANGES_SECTION_LABEL,
			       init_sections_and_labels_generation * 6);
  if (dwarf_version >= 5 && dwarf_split_debug_info)
    ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
				 DEBUG_RANGES_SECTION_LABEL,
				 1 + init_sections_and_labels_generation * 6);
  ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
			       DEBUG_ADDR_SECTION_LABEL,
			       init_sections_and_labels_generation);
  ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
			       (dwarf_strict && dwarf_version < 5)
			       ? DEBUG_MACINFO_SECTION_LABEL
			       : DEBUG_MACRO_SECTION_LABEL,
			       init_sections_and_labels_generation);
  ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
			       init_sections_and_labels_generation);

  ++init_sections_and_labels_generation;
  return init_sections_and_labels_generation - 1;
}
29629 | |
/* Set up for Dwarf output at the start of compilation.  FILENAME is
   unused here; the hash tables and vectors used throughout dwarf2out
   are allocated.  */

static void
dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
{
  /* Allocate the file_table.  */
  file_table = hash_table<dwarf_file_hasher>::create_ggc (n: 50);

#ifndef DWARF2_LINENO_DEBUGGING_INFO
  /* The following tables are only needed when emitting full debug
     info, not when only line number info is produced.  */

  /* Allocate the decl_die_table.  */
  decl_die_table = hash_table<decl_die_hasher>::create_ggc (n: 10);

  /* Allocate the decl_loc_table.  */
  decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (n: 10);

  /* Allocate the cached_dw_loc_list_table.  */
  cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (n: 10);

  /* Allocate the initial hunk of the abbrev_die_table.  */
  vec_alloc (v&: abbrev_die_table, nelems: 256);
  /* Zero-th entry is allocated, but unused.  */
  abbrev_die_table->quick_push (NULL);

  /* Allocate the dwarf_proc_stack_usage_map.  */
  dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;

  /* Allocate the pubtypes and pubnames vectors.  */
  vec_alloc (v&: pubname_table, nelems: 32);
  vec_alloc (v&: pubtype_table, nelems: 32);

  vec_alloc (v&: incomplete_types, nelems: 64);

  vec_alloc (v&: used_rtx_array, nelems: 32);

  /* Macro information is only tracked at -g3 and above.  */
  if (debug_info_level >= DINFO_LEVEL_VERBOSE)
    vec_alloc (v&: macinfo_table, nelems: 64);
#endif

  /* If front-ends already registered a main translation unit but we were not
     ready to perform the association, do this now.  */
  if (main_translation_unit != NULL_TREE)
    equate_decl_number_to_die (decl: main_translation_unit, decl_die: comp_unit_die ());
}
29673 | |
/* Called before compile () starts outputting functions, variables
   and toplevel asms into assembly.  Emits the section/text labels and
   any assembler directives that must precede the first function.
   Idempotent: a second call returns immediately.  */

static void
dwarf2out_assembly_start (void)
{
  /* Already initialized on a previous call.  */
  if (text_section_line_info)
    return;

#ifndef DWARF2_LINENO_DEBUGGING_INFO
  ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
  ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
  ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
			       COLD_TEXT_SECTION_LABEL, 0);
  ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);

  /* Emit the label that marks the start of .text.  */
  switch_to_section (text_section);
  ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
#endif

  /* Make sure the line number table for .text always exists.  */
  text_section_line_info = new_line_info_table ();
  text_section_line_info->end_label = text_end_label;

#ifdef DWARF2_LINENO_DEBUGGING_INFO
  cur_line_info_table = text_section_line_info;
#endif

  /* If we use CFI asm and don't need .eh_frame, tell gas to emit
     .debug_frame instead.  */
  if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
      && dwarf2out_do_cfi_asm ()
      && !dwarf2out_do_eh_frame ())
    fprintf (stream: asm_out_file, format: "\t.cfi_sections\t.debug_frame\n" );

#if defined(HAVE_AS_GDWARF_5_DEBUG_FLAG) && defined(HAVE_AS_WORKING_DWARF_N_FLAG)
  if (output_asm_line_debug_info () && dwarf_version >= 5)
    {
      /* When gas outputs DWARF5 .debug_line[_str] then we have to
	 tell it the comp_dir and main file name for the zero entry
	 line table.  */
      const char *comp_dir, *filename0;

      comp_dir = comp_dir_string ();
      if (comp_dir == NULL)
	comp_dir = "" ;

      filename0 = get_AT_string (die: comp_unit_die (), attr_kind: DW_AT_name);
      if (filename0 == NULL)
	filename0 = "" ;

      fprintf (stream: asm_out_file, format: "\t.file 0 " );
      output_quoted_string (asm_out_file, remap_debug_filename (comp_dir));
      fputc (c: ' ', stream: asm_out_file);
      output_quoted_string (asm_out_file, remap_debug_filename (filename0));
      fputc (c: '\n', stream: asm_out_file);
    }
  else
#endif
    /* Work around for PR101575: output a dummy .file directive.  */
    if (!last_emitted_file && dwarf_debuginfo_p ()
	&& debug_info_level >= DINFO_LEVEL_TERSE)
      {
	const char *filename0 = get_AT_string (die: comp_unit_die (), attr_kind: DW_AT_name);

	if (filename0 == NULL)
	  filename0 = "<dummy>" ;
	maybe_emit_file (fd: lookup_filename (file_name: filename0));
      }
}
29742 | |
29743 | /* A helper function for dwarf2out_finish called through |
29744 | htab_traverse. Assign a string its index. All strings must be |
29745 | collected into the table by the time index_string is called, |
29746 | because the indexing code relies on htab_traverse to traverse nodes |
29747 | in the same order for each run. */ |
29748 | |
29749 | int |
29750 | index_string (indirect_string_node **h, unsigned int *index) |
29751 | { |
29752 | indirect_string_node *node = *h; |
29753 | |
29754 | find_string_form (node); |
29755 | if (node->form == dwarf_FORM (form: DW_FORM_strx) && node->refcount > 0) |
29756 | { |
29757 | gcc_assert (node->index == NO_INDEX_ASSIGNED); |
29758 | node->index = *index; |
29759 | *index += 1; |
29760 | } |
29761 | return 1; |
29762 | } |
29763 | |
29764 | /* A helper function for output_indirect_strings called through |
29765 | htab_traverse. Output the offset to a string and update the |
29766 | current offset. */ |
29767 | |
29768 | int |
29769 | output_index_string_offset (indirect_string_node **h, unsigned int *offset) |
29770 | { |
29771 | indirect_string_node *node = *h; |
29772 | |
29773 | if (node->form == dwarf_FORM (form: DW_FORM_strx) && node->refcount > 0) |
29774 | { |
29775 | /* Assert that this node has been assigned an index. */ |
29776 | gcc_assert (node->index != NO_INDEX_ASSIGNED |
29777 | && node->index != NOT_INDEXED); |
29778 | dw2_asm_output_data (dwarf_offset_size, *offset, |
29779 | "indexed string 0x%x: %s" , node->index, node->str); |
29780 | *offset += strlen (s: node->str) + 1; |
29781 | } |
29782 | return 1; |
29783 | } |
29784 | |
29785 | /* A helper function for dwarf2out_finish called through |
29786 | htab_traverse. Output the indexed string. */ |
29787 | |
29788 | int |
29789 | output_index_string (indirect_string_node **h, unsigned int *cur_idx) |
29790 | { |
29791 | struct indirect_string_node *node = *h; |
29792 | |
29793 | if (node->form == dwarf_FORM (form: DW_FORM_strx) && node->refcount > 0) |
29794 | { |
29795 | /* Assert that the strings are output in the same order as their |
29796 | indexes were assigned. */ |
29797 | gcc_assert (*cur_idx == node->index); |
29798 | assemble_string (node->str, strlen (s: node->str) + 1); |
29799 | *cur_idx += 1; |
29800 | } |
29801 | return 1; |
29802 | } |
29803 | |
29804 | /* A helper function for output_indirect_strings. Counts the number |
29805 | of index strings offsets. Must match the logic of the functions |
29806 | output_index_string[_offsets] above. */ |
29807 | int |
29808 | count_index_strings (indirect_string_node **h, unsigned int *last_idx) |
29809 | { |
29810 | struct indirect_string_node *node = *h; |
29811 | |
29812 | if (node->form == dwarf_FORM (form: DW_FORM_strx) && node->refcount > 0) |
29813 | *last_idx += 1; |
29814 | return 1; |
29815 | } |
29816 | |
29817 | /* A helper function for dwarf2out_finish called through |
29818 | htab_traverse. Emit one queued .debug_str string. */ |
29819 | |
29820 | int |
29821 | output_indirect_string (indirect_string_node **h, enum dwarf_form form) |
29822 | { |
29823 | struct indirect_string_node *node = *h; |
29824 | |
29825 | node->form = find_string_form (node); |
29826 | if (node->form == form && node->refcount > 0) |
29827 | { |
29828 | ASM_OUTPUT_LABEL (asm_out_file, node->label); |
29829 | assemble_string (node->str, strlen (s: node->str) + 1); |
29830 | } |
29831 | |
29832 | return 1; |
29833 | } |
29834 | |
/* Output the indexed string table.  Without -gsplit-dwarf all strings
   go into .debug_str as DW_FORM_strp; with split dwarf the skeleton
   strings go to .debug_str, the offsets to .debug_str_offsets and the
   string bodies to .debug_str.dwo.  */

static void
output_indirect_strings (void)
{
  switch_to_section (debug_str_section);
  if (!dwarf_split_debug_info)
    debug_str_hash->traverse<enum dwarf_form,
			     output_indirect_string> (argument: DW_FORM_strp);
  else
    {
      unsigned int offset = 0;
      unsigned int cur_idx = 0;

      if (skeleton_debug_str_hash)
	skeleton_debug_str_hash->traverse<enum dwarf_form,
					  output_indirect_string> (argument: DW_FORM_strp);

      switch_to_section (debug_str_offsets_section);
      /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
	 header.  Note that we don't need to generate a label to the
	 actual index table following the header here, because this is
	 for the split dwarf case only.  In an .dwo file there is only
	 one string offsets table (and one debug info section).  But
	 if we would start using string offset tables for the main (or
	 skeleton) unit, then we have to add a DW_AT_str_offsets_base
	 pointing to the actual index after the header.  Split dwarf
	 units will never have a string offsets base attribute.  When
	 a split unit is moved into a .dwp file the string offsets can
	 be found through the .debug_cu_index section table.  */
      if (dwarf_version >= 5)
	{
	  unsigned int last_idx = 0;
	  unsigned long str_offsets_length;

	  /* Count the entries first so the unit length can be emitted
	     before the offsets themselves.  */
	  debug_str_hash->traverse_noresize
	    <unsigned int *, count_index_strings> (argument: &last_idx);
	  /* +4 covers the version and padding fields below.  */
	  str_offsets_length = last_idx * dwarf_offset_size + 4;
	  if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
	    dw2_asm_output_data (4, 0xffffffff,
				 "Escape value for 64-bit DWARF extension" );
	  dw2_asm_output_data (dwarf_offset_size, str_offsets_length,
			       "Length of string offsets unit" );
	  dw2_asm_output_data (2, 5, "DWARF string offsets version" );
	  dw2_asm_output_data (2, 0, "Header zero padding" );
	}
      debug_str_hash->traverse_noresize
	<unsigned int *, output_index_string_offset> (argument: &offset);
      switch_to_section (debug_str_dwo_section);
      debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
	(argument: &cur_idx);
    }
}
29888 | |
/* Callback for htab_traverse to assign an index to an entry in the
   table, and to write that entry to the .debug_addr section.
   CUR_INDEX tracks the next expected index; unreferenced entries are
   skipped.  */

int
output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
{
  addr_table_entry *entry = *slot;

  /* Entries nobody references are not emitted and must not have been
     assigned a real index.  */
  if (entry->refcount == 0)
    {
      gcc_assert (entry->index == NO_INDEX_ASSIGNED
		  || entry->index == NOT_INDEXED);
      return 1;
    }

  /* Entries must be emitted in exactly the order their indexes were
     assigned.  */
  gcc_assert (entry->index == *cur_index);
  (*cur_index)++;

  switch (entry->kind)
    {
    case ate_kind_rtx:
      dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
			       "0x%x" , entry->index);
      break;
    case ate_kind_rtx_dtprel:
      /* DTP-relative addresses need the target hook to emit them.  */
      gcc_assert (targetm.asm_out.output_dwarf_dtprel);
      targetm.asm_out.output_dwarf_dtprel (asm_out_file,
					   DWARF2_ADDR_SIZE,
					   entry->addr.rtl);
      fputc (c: '\n', stream: asm_out_file);
      break;
    case ate_kind_label:
      dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
			   "0x%x" , entry->index);
      break;
    default:
      gcc_unreachable ();
    }
  return 1;
}
29929 | |
29930 | /* A helper function for dwarf2out_finish. Counts the number |
29931 | of indexed addresses. Must match the logic of the functions |
29932 | output_addr_table_entry above. */ |
29933 | int |
29934 | count_index_addrs (addr_table_entry **slot, unsigned int *last_idx) |
29935 | { |
29936 | addr_table_entry *entry = *slot; |
29937 | |
29938 | if (entry->refcount > 0) |
29939 | *last_idx += 1; |
29940 | return 1; |
29941 | } |
29942 | |
29943 | /* Produce the .debug_addr section. */ |
29944 | |
29945 | static void |
29946 | output_addr_table (void) |
29947 | { |
29948 | unsigned int index = 0; |
29949 | if (addr_index_table == NULL || addr_index_table->size () == 0) |
29950 | return; |
29951 | |
29952 | switch_to_section (debug_addr_section); |
29953 | /* GNU DebugFission https://gcc.gnu.org/wiki/DebugFission |
29954 | which GCC uses to implement -gsplit-dwarf as DWARF GNU extension |
29955 | before DWARF5, didn't have a header for .debug_addr units. |
29956 | DWARF5 specifies a small header when address tables are used. */ |
29957 | if (dwarf_version >= 5) |
29958 | { |
29959 | unsigned int last_idx = 0; |
29960 | unsigned long addrs_length; |
29961 | |
29962 | addr_index_table->traverse_noresize |
29963 | <unsigned int *, count_index_addrs> (argument: &last_idx); |
29964 | addrs_length = last_idx * DWARF2_ADDR_SIZE + 4; |
29965 | |
29966 | if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4) |
29967 | dw2_asm_output_data (4, 0xffffffff, |
29968 | "Escape value for 64-bit DWARF extension" ); |
29969 | dw2_asm_output_data (dwarf_offset_size, addrs_length, |
29970 | "Length of Address Unit" ); |
29971 | dw2_asm_output_data (2, 5, "DWARF addr version" ); |
29972 | dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address" ); |
29973 | dw2_asm_output_data (1, 0, "Size of Segment Descriptor" ); |
29974 | } |
29975 | ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label); |
29976 | |
29977 | addr_index_table |
29978 | ->traverse_noresize<unsigned int *, output_addr_table_entry> (argument: &index); |
29979 | } |
29980 | |
#if ENABLE_ASSERT_CHECKING
/* Verify that all marks are clear.  */

static void
verify_marks_clear (dw_die_ref die)
{
  dw_die_ref kid;

  /* Check DIE itself, then recurse over every child.  */
  gcc_assert (die->die_mark == 0);
  FOR_EACH_CHILD (die, kid, verify_marks_clear (kid));
}
#endif /* ENABLE_ASSERT_CHECKING */
29993 | |
29994 | /* Clear the marks for a die and its children. |
29995 | Be cool if the mark isn't set. */ |
29996 | |
29997 | static void |
29998 | prune_unmark_dies (dw_die_ref die) |
29999 | { |
30000 | dw_die_ref c; |
30001 | |
30002 | if (die->die_mark) |
30003 | die->die_mark = 0; |
30004 | FOR_EACH_CHILD (die, c, prune_unmark_dies (c)); |
30005 | } |
30006 | |
/* Given LOC that is referenced by a DIE we're marking as used, find all
   referenced DWARF procedures it references and mark them as used.  */

static void
prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
{
  for (; loc != NULL; loc = loc->dw_loc_next)
    switch (loc->dw_loc_opc)
      {
      case DW_OP_implicit_pointer:
      case DW_OP_convert:
      case DW_OP_reinterpret:
      case DW_OP_GNU_implicit_pointer:
      case DW_OP_GNU_convert:
      case DW_OP_GNU_reinterpret:
	/* These ops may carry other operand classes as well; only mark
	   when operand 1 really is a DIE reference.  */
	if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
	  prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
	break;
      case DW_OP_GNU_variable_value:
	if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
	  {
	    /* Try to resolve the lingering decl reference into a direct
	       DIE reference; if the decl has no DIE, mark nothing and
	       leave the operand untouched.  */
	    dw_die_ref ref
	      = lookup_decl_die (decl: loc->dw_loc_oprnd1.v.val_decl_ref);
	    if (ref == NULL)
	      break;
	    loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
	    loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
	    loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
	  }
	/* FALLTHRU */
      case DW_OP_call2:
      case DW_OP_call4:
      case DW_OP_call_ref:
      case DW_OP_const_type:
      case DW_OP_GNU_const_type:
      case DW_OP_GNU_parameter_ref:
	/* Operand 1 must be a DIE reference here; make sure the
	   referenced DIE gets emitted.  */
	gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
	prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
	break;
      case DW_OP_regval_type:
      case DW_OP_deref_type:
      case DW_OP_GNU_regval_type:
      case DW_OP_GNU_deref_type:
	/* For these typed ops the referenced DIE is in operand 2.  */
	gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
	prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
	break;
      case DW_OP_entry_value:
      case DW_OP_GNU_entry_value:
	/* Recurse into the nested location expression.  */
	gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
	prune_unused_types_walk_loc_descr (loc: loc->dw_loc_oprnd1.v.val_loc);
	break;
      default:
	break;
      }
}
30062 | |
/* Given DIE that we're marking as used, find any other dies
   it references as attributes and mark them as used.  */

static void
prune_unused_types_walk_attribs (dw_die_ref die)
{
  dw_attr_node *a;
  unsigned ix;

  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    {
      switch (AT_class (a))
	{
	/* Make sure DWARF procedures referenced by location descriptions will
	   get emitted.  */
	case dw_val_class_loc:
	  prune_unused_types_walk_loc_descr (loc: AT_loc (a));
	  break;
	case dw_val_class_loc_list:
	  /* Walk every expression in the location list.  */
	  for (dw_loc_list_ref list = AT_loc_list (a);
	       list != NULL;
	       list = list->dw_loc_next)
	    prune_unused_types_walk_loc_descr (loc: list->expr);
	  break;

	case dw_val_class_view_list:
	  /* This points to a loc_list in another attribute, so it's
	     already covered.  */
	  break;

	case dw_val_class_die_ref:
	  /* A reference to another DIE.
	     Make sure that it will get emitted.
	     If it was broken out into a comdat group, don't follow it.  */
	  if (! AT_ref (a)->comdat_type_p
	      || a->dw_attr == DW_AT_specification)
	    prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
	  break;

	case dw_val_class_str:
	  /* Set the string's refcount to 0 so that prune_unused_types_mark
	     accounts properly for it.  */
	  a->dw_attr_val.v.val_str->refcount = 0;
	  break;

	default:
	  break;
	}
    }
}
30113 | |
30114 | /* Mark the generic parameters and arguments children DIEs of DIE. */ |
30115 | |
30116 | static void |
30117 | prune_unused_types_mark_generic_parms_dies (dw_die_ref die) |
30118 | { |
30119 | dw_die_ref c; |
30120 | |
30121 | if (die == NULL || die->die_child == NULL) |
30122 | return; |
30123 | c = die->die_child; |
30124 | do |
30125 | { |
30126 | if (is_template_parameter (die: c)) |
30127 | prune_unused_types_mark (c, 1); |
30128 | c = c->die_sib; |
30129 | } while (c && c != die->die_child); |
30130 | } |
30131 | |
/* Mark DIE as being used.  If DOKIDS is true, then walk down
   to DIE's children.  */

static void
prune_unused_types_mark (dw_die_ref die, int dokids)
{
  dw_die_ref c;

  /* die_mark protocol: 0 = untouched, 1 = marked used,
     2 = marked used and children already walked.  */
  if (die->die_mark == 0)
    {
      /* We haven't done this node yet.  Mark it as used.  */
      die->die_mark = 1;
      /* If this is the DIE of a generic type instantiation,
	 mark the children DIEs that describe its generic parms and
	 args.  */
      prune_unused_types_mark_generic_parms_dies (die);

      /* We also have to mark its parents as used.
	 (But we don't want to mark our parent's kids due to this,
	 unless it is a class.)  */
      if (die->die_parent)
	prune_unused_types_mark (die: die->die_parent,
				 dokids: class_scope_p (context_die: die->die_parent));

      /* Mark any referenced nodes.  */
      prune_unused_types_walk_attribs (die);

      /* If this node is a specification,
         also mark the definition, if it exists.  */
      if (get_AT_flag (die, attr_kind: DW_AT_declaration) && die->die_definition)
	prune_unused_types_mark (die: die->die_definition, dokids: 1);
    }

  if (dokids && die->die_mark != 2)
    {
      /* We need to walk the children, but haven't done so yet.
	 Remember that we've walked the kids.  */
      die->die_mark = 2;

      /* If this is an array type, we need to make sure our
	 kids get marked, even if they're types.  If we're
	 breaking out types into comdat sections, do this
	 for all type definitions.  */
      if (die->die_tag == DW_TAG_array_type
	  || (use_debug_types
	      && is_type_die (die) && ! is_declaration_die (die)))
	FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
      else
	FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
    }
}
30183 | |
30184 | /* For local classes, look if any static member functions were emitted |
30185 | and if so, mark them. */ |
30186 | |
30187 | static void |
30188 | prune_unused_types_walk_local_classes (dw_die_ref die) |
30189 | { |
30190 | dw_die_ref c; |
30191 | |
30192 | if (die->die_mark == 2) |
30193 | return; |
30194 | |
30195 | switch (die->die_tag) |
30196 | { |
30197 | case DW_TAG_structure_type: |
30198 | case DW_TAG_union_type: |
30199 | case DW_TAG_class_type: |
30200 | case DW_TAG_interface_type: |
30201 | break; |
30202 | |
30203 | case DW_TAG_subprogram: |
30204 | if (!get_AT_flag (die, attr_kind: DW_AT_declaration) |
30205 | || die->die_definition != NULL) |
30206 | prune_unused_types_mark (die, dokids: 1); |
30207 | return; |
30208 | |
30209 | default: |
30210 | return; |
30211 | } |
30212 | |
30213 | /* Mark children. */ |
30214 | FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c)); |
30215 | } |
30216 | |
/* Walk the tree DIE and mark types that we actually use.  */

static void
prune_unused_types_walk (dw_die_ref die)
{
  dw_die_ref c;

  /* Don't do anything if this node is already marked and
     children have been marked as well.  */
  if (die->die_mark == 2)
    return;

  /* In this switch, "break" means "mark this DIE" (fall out to the
     marking code below), while "return" means "leave it unmarked".  */
  switch (die->die_tag)
    {
    case DW_TAG_structure_type:
    case DW_TAG_union_type:
    case DW_TAG_class_type:
    case DW_TAG_interface_type:
      if (die->die_perennial_p)
	break;

      /* Determine whether this aggregate is nested inside a function
	 (i.e. is a local class).  */
      for (c = die->die_parent; c; c = c->die_parent)
	if (c->die_tag == DW_TAG_subprogram)
	  break;

      /* Finding used static member functions inside of classes
	 is needed just for local classes, because for other classes
	 static member function DIEs with DW_AT_specification
	 are emitted outside of the DW_TAG_*_type.  If we ever change
	 it, we'd need to call this even for non-local classes.  */
      if (c)
	prune_unused_types_walk_local_classes (die);

      /* It's a type node --- don't mark it.  */
      return;

    case DW_TAG_const_type:
    case DW_TAG_packed_type:
    case DW_TAG_pointer_type:
    case DW_TAG_reference_type:
    case DW_TAG_rvalue_reference_type:
    case DW_TAG_volatile_type:
    case DW_TAG_restrict_type:
    case DW_TAG_shared_type:
    case DW_TAG_atomic_type:
    case DW_TAG_immutable_type:
    case DW_TAG_typedef:
    case DW_TAG_array_type:
    case DW_TAG_coarray_type:
    case DW_TAG_friend:
    case DW_TAG_enumeration_type:
    case DW_TAG_subroutine_type:
    case DW_TAG_string_type:
    case DW_TAG_set_type:
    case DW_TAG_subrange_type:
    case DW_TAG_ptr_to_member_type:
    case DW_TAG_file_type:
    case DW_TAG_unspecified_type:
    case DW_TAG_dynamic_type:
      /* Type nodes are useful only when other DIEs reference them --- don't
	 mark them.  */
      /* FALLTHROUGH */

    case DW_TAG_dwarf_procedure:
      /* Likewise for DWARF procedures.  */

      /* Perennial DIEs are kept (marked) regardless of references.  */
      if (die->die_perennial_p)
	break;

      return;

    case DW_TAG_variable:
      if (flag_debug_only_used_symbols)
	{
	  if (die->die_perennial_p)
	    break;

	  /* For static data members, the declaration in the class is supposed
	     to have DW_TAG_member tag in DWARF{3,4} but DW_TAG_variable in
	     DWARF5.  DW_TAG_member will be marked, so mark even such
	     DW_TAG_variables in DWARF5, as long as it has DW_AT_const_value
	     attribute.  */
	  if (dwarf_version >= 5
	      && class_scope_p (context_die: die->die_parent)
	      && get_AT (die, attr_kind: DW_AT_const_value))
	    break;

	  /* premark_used_variables marks external variables --- don't mark
	     them here.  But function-local externals are always considered
	     used.  */
	  if (get_AT (die, attr_kind: DW_AT_external))
	    {
	      for (c = die->die_parent; c; c = c->die_parent)
		if (c->die_tag == DW_TAG_subprogram)
		  break;
	      if (!c)
		return;
	    }
	}
      /* FALLTHROUGH */

    default:
      /* Mark everything else.  */
      break;
    }

  if (die->die_mark == 0)
    {
      die->die_mark = 1;

      /* Now, mark any dies referenced from here.  */
      prune_unused_types_walk_attribs (die);
    }

  die->die_mark = 2;

  /* Mark children.  */
  FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
}
30336 | |
/* Increment the string counts on strings referred to from DIE's
   attributes.  */

static void
prune_unused_types_update_strings (dw_die_ref die)
{
  dw_attr_node *a;
  unsigned ix;

  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    if (AT_class (a) == dw_val_class_str)
      {
	struct indirect_string_node *s = a->dw_attr_val.v.val_str;
	s->refcount++;
	/* Avoid unnecessarily putting strings that are used less than
	   twice in the hash table.  */
	if (s->form != DW_FORM_line_strp
	    && (s->refcount
		== ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2)))
	  {
	    /* Register the string exactly once, when it crosses the
	       threshold.  The slot must be empty: the hash was emptied
	       before the prune walk started (see prune_unused_types).  */
	    indirect_string_node **slot
	      = debug_str_hash->find_slot_with_hash (comparable: s->str,
						     hash: htab_hash_string (s->str),
						     insert: INSERT);
	    gcc_assert (*slot == NULL);
	    *slot = s;
	  }
      }
}
30366 | |
30367 | /* Mark DIE and its children as removed. */ |
30368 | |
30369 | static void |
30370 | mark_removed (dw_die_ref die) |
30371 | { |
30372 | dw_die_ref c; |
30373 | die->removed = true; |
30374 | FOR_EACH_CHILD (die, c, mark_removed (c)); |
30375 | } |
30376 | |
/* Remove from the tree DIE any dies that aren't marked.  */

static void
prune_unused_types_prune (dw_die_ref die)
{
  dw_die_ref c;

  gcc_assert (die->die_mark);
  prune_unused_types_update_strings (die);

  if (! die->die_child)
    return;

  /* Children form a circular sibling list: die_child points at the
     last child, whose die_sib wraps around to the first.  Walk the
     ring, splicing out runs of unmarked children.  */
  c = die->die_child;
  do {
    dw_die_ref prev = c, next;
    /* Advance C over unmarked children following PREV, detaching and
       flagging each one as removed.  */
    for (c = c->die_sib; ! c->die_mark; c = next)
      if (c == die->die_child)
	{
	  /* No marked children between 'prev' and the end of the list.  */
	  if (prev == c)
	    /* No marked children at all.  */
	    die->die_child = NULL;
	  else
	    {
	      prev->die_sib = c->die_sib;
	      die->die_child = prev;
	    }
	  c->die_sib = NULL;
	  mark_removed (die: c);
	  return;
	}
      else
	{
	  next = c->die_sib;
	  c->die_sib = NULL;
	  mark_removed (die: c);
	}

    /* Reconnect PREV to the first surviving child, then prune that
       child's own subtree.  */
    if (c != prev->die_sib)
      prev->die_sib = c;
    prune_unused_types_prune (die: c);
  } while (c != die->die_child);
}
30421 | |
/* Remove dies representing declarations that we never use.  */

static void
prune_unused_types (void)
{
  unsigned int i;
  limbo_die_node *node;
  comdat_type_node *ctnode;
  pubname_entry *pub;
  dw_die_ref base_type;

#if ENABLE_ASSERT_CHECKING
  /* All the marks should already be clear.  */
  verify_marks_clear (die: comp_unit_die ());
  for (node = limbo_die_list; node; node = node->next)
    verify_marks_clear (die: node->die);
  for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
    verify_marks_clear (die: ctnode->root_die);
#endif /* ENABLE_ASSERT_CHECKING */

  /* Mark types that are used in global variables.  */
  premark_types_used_by_global_vars ();

  /* Mark variables used in the symtab.  */
  if (flag_debug_only_used_symbols)
    premark_used_variables ();

  /* Set the mark on nodes that are actually used.  */
  prune_unused_types_walk (die: comp_unit_die ());
  for (node = limbo_die_list; node; node = node->next)
    prune_unused_types_walk (die: node->die);
  for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
    {
      prune_unused_types_walk (die: ctnode->root_die);
      prune_unused_types_mark (die: ctnode->type_die, dokids: 1);
    }

  /* Also set the mark on nodes referenced from the pubname_table.  Enumerators
     are unusual in that they are pubnames that are the children of pubtypes.
     They should only be marked via their parent DW_TAG_enumeration_type die,
     not as roots in themselves.  */
  FOR_EACH_VEC_ELT (*pubname_table, i, pub)
    if (pub->die->die_tag != DW_TAG_enumerator)
      prune_unused_types_mark (die: pub->die, dokids: 1);
  for (i = 0; base_types.iterate (ix: i, ptr: &base_type); i++)
    prune_unused_types_mark (die: base_type, dokids: 1);

  /* Also set the mark on nodes that could be referenced by
     DW_TAG_call_site DW_AT_call_origin (i.e. direct call callees) or
     by DW_TAG_inlined_subroutine origins.  */
  cgraph_node *cnode;
  FOR_EACH_FUNCTION (cnode)
    if (cnode->referred_to_p (include_self: false))
      {
	dw_die_ref die = lookup_decl_die (decl: cnode->decl);
	if (die == NULL || die->die_mark)
	  continue;
	/* Only functions called from some other function count; a
	   self-recursive call alone does not keep the DIE.  */
	for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
	  if (e->caller != cnode)
	    {
	      prune_unused_types_mark (die, dokids: 1);
	      break;
	    }
      }

  /* Empty the string hash tables; strings still referenced from
     surviving DIEs are re-inserted during the prune walk below
     (see prune_unused_types_update_strings).  */
  if (debug_str_hash)
    debug_str_hash->empty ();
  if (skeleton_debug_str_hash)
    skeleton_debug_str_hash->empty ();
  prune_unused_types_prune (die: comp_unit_die ());
  for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
    {
      node = *pnode;
      if (!node->die->die_mark)
	/* Unmarked limbo DIEs are dropped from the list entirely.  */
	*pnode = node->next;
      else
	{
	  prune_unused_types_prune (die: node->die);
	  pnode = &node->next;
	}
    }
  for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
    prune_unused_types_prune (die: ctnode->root_die);

  /* Leave the marks clear.  */
  prune_unmark_dies (die: comp_unit_die ());
  for (node = limbo_die_list; node; node = node->next)
    prune_unmark_dies (die: node->die);
  for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
    prune_unmark_dies (die: ctnode->root_die);
}
30513 | |
30514 | /* Helpers to manipulate hash table of comdat type units. */ |
30515 | |
30516 | struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node> |
30517 | { |
30518 | static inline hashval_t hash (const comdat_type_node *); |
30519 | static inline bool equal (const comdat_type_node *, const comdat_type_node *); |
30520 | }; |
30521 | |
30522 | inline hashval_t |
30523 | comdat_type_hasher::hash (const comdat_type_node *type_node) |
30524 | { |
30525 | hashval_t h; |
30526 | memcpy (dest: &h, src: type_node->signature, n: sizeof (h)); |
30527 | return h; |
30528 | } |
30529 | |
30530 | inline bool |
30531 | comdat_type_hasher::equal (const comdat_type_node *type_node_1, |
30532 | const comdat_type_node *type_node_2) |
30533 | { |
30534 | return (! memcmp (s1: type_node_1->signature, s2: type_node_2->signature, |
30535 | DWARF_TYPE_SIGNATURE_SIZE)); |
30536 | } |
30537 | |
/* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
   to the location it would have been added, should we know its
   DECL_ASSEMBLER_NAME when we added other attributes.  This will
   probably improve compactness of debug info, removing equivalent
   abbrevs, and hide any differences caused by deferring the
   computation of the assembler name, triggered by e.g. PCH.  */

static inline void
move_linkage_attr (dw_die_ref die)
{
  /* The linkage-name attribute is the most recently added one,
     i.e. the last element of the attribute vector.  */
  unsigned ix = vec_safe_length (v: die->die_attr);
  dw_attr_node linkage = (*die->die_attr)[ix - 1];

  gcc_assert (linkage.dw_attr == DW_AT_linkage_name
	      || linkage.dw_attr == DW_AT_MIPS_linkage_name);

  /* Scan backwards for the insertion point: just after the nearest
     DW_AT_decl_line/DW_AT_decl_column/DW_AT_name, or at index 0 if
     none of those is present.  */
  while (--ix > 0)
    {
      dw_attr_node *prev = &(*die->die_attr)[ix - 1];

      if (prev->dw_attr == DW_AT_decl_line
	  || prev->dw_attr == DW_AT_decl_column
	  || prev->dw_attr == DW_AT_name)
	break;
    }

  /* Only move the attribute if it is not already in place.  */
  if (ix != vec_safe_length (v: die->die_attr) - 1)
    {
      die->die_attr->pop ();
      die->die_attr->quick_insert (ix, obj: linkage);
    }
}
30570 | |
/* Helper function for resolve_addr, mark DW_TAG_base_type nodes
   referenced from typed stack ops and count how often they are used.  */

static void
mark_base_types (dw_loc_descr_ref loc)
{
  dw_die_ref base_type = NULL;

  for (; loc; loc = loc->dw_loc_next)
    {
      switch (loc->dw_loc_opc)
	{
	case DW_OP_regval_type:
	case DW_OP_deref_type:
	case DW_OP_GNU_regval_type:
	case DW_OP_GNU_deref_type:
	  /* The base type DIE reference is in operand 2 for these.  */
	  base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
	  break;
	case DW_OP_convert:
	case DW_OP_reinterpret:
	case DW_OP_GNU_convert:
	case DW_OP_GNU_reinterpret:
	  /* These may carry an unsigned constant operand instead of a
	     DIE reference; skip those.  */
	  if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
	    continue;
	  /* FALLTHRU */
	case DW_OP_const_type:
	case DW_OP_GNU_const_type:
	  base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
	  break;
	case DW_OP_entry_value:
	case DW_OP_GNU_entry_value:
	  /* Recurse into the nested expression; nothing to count at
	     this level.  */
	  mark_base_types (loc: loc->dw_loc_oprnd1.v.val_loc);
	  continue;
	default:
	  continue;
	}
      gcc_assert (base_type->die_parent == comp_unit_die ());
      /* die_mark doubles as a use counter; the first use also records
	 the DIE in the base_types vector.  */
      if (base_type->die_mark)
	base_type->die_mark++;
      else
	{
	  base_types.safe_push (obj: base_type);
	  base_type->die_mark = 1;
	}
    }
}
30617 | |
30618 | /* Stripped-down variant of resolve_addr, mark DW_TAG_base_type nodes |
30619 | referenced from typed stack ops and count how often they are used. */ |
30620 | |
30621 | static void |
30622 | mark_base_types (dw_die_ref die) |
30623 | { |
30624 | dw_die_ref c; |
30625 | dw_attr_node *a; |
30626 | dw_loc_list_ref *curr; |
30627 | unsigned ix; |
30628 | |
30629 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
30630 | switch (AT_class (a)) |
30631 | { |
30632 | case dw_val_class_loc_list: |
30633 | curr = AT_loc_list_ptr (a); |
30634 | while (*curr) |
30635 | { |
30636 | mark_base_types (loc: (*curr)->expr); |
30637 | curr = &(*curr)->dw_loc_next; |
30638 | } |
30639 | break; |
30640 | |
30641 | case dw_val_class_loc: |
30642 | mark_base_types (loc: AT_loc (a)); |
30643 | break; |
30644 | |
30645 | default: |
30646 | break; |
30647 | } |
30648 | |
30649 | FOR_EACH_CHILD (die, c, mark_base_types (c)); |
30650 | } |
30651 | |
30652 | /* Comparison function for sorting marked base types. */ |
30653 | |
30654 | static int |
30655 | base_type_cmp (const void *x, const void *y) |
30656 | { |
30657 | dw_die_ref dx = *(const dw_die_ref *) x; |
30658 | dw_die_ref dy = *(const dw_die_ref *) y; |
30659 | unsigned int byte_size1, byte_size2; |
30660 | unsigned int encoding1, encoding2; |
30661 | unsigned int align1, align2; |
30662 | if (dx->die_mark > dy->die_mark) |
30663 | return -1; |
30664 | if (dx->die_mark < dy->die_mark) |
30665 | return 1; |
30666 | byte_size1 = get_AT_unsigned (die: dx, attr_kind: DW_AT_byte_size); |
30667 | byte_size2 = get_AT_unsigned (die: dy, attr_kind: DW_AT_byte_size); |
30668 | if (byte_size1 < byte_size2) |
30669 | return 1; |
30670 | if (byte_size1 > byte_size2) |
30671 | return -1; |
30672 | encoding1 = get_AT_unsigned (die: dx, attr_kind: DW_AT_encoding); |
30673 | encoding2 = get_AT_unsigned (die: dy, attr_kind: DW_AT_encoding); |
30674 | if (encoding1 < encoding2) |
30675 | return 1; |
30676 | if (encoding1 > encoding2) |
30677 | return -1; |
30678 | align1 = get_AT_unsigned (die: dx, attr_kind: DW_AT_alignment); |
30679 | align2 = get_AT_unsigned (die: dy, attr_kind: DW_AT_alignment); |
30680 | if (align1 < align2) |
30681 | return 1; |
30682 | if (align1 > align2) |
30683 | return -1; |
30684 | return 0; |
30685 | } |
30686 | |
/* Move base types marked by mark_base_types as early as possible
   in the CU, sorted by decreasing usage count both to make the
   uleb128 references as small as possible and to make sure they
   will have die_offset already computed by calc_die_sizes when
   sizes of typed stack loc ops is computed.  */

static void
move_marked_base_types (void)
{
  unsigned int i;
  dw_die_ref base_type, die, c;

  if (base_types.is_empty ())
    return;

  /* Sort by decreasing usage count, they will be added again in that
     order later on.  */
  base_types.qsort (base_type_cmp);
  die = comp_unit_die ();
  c = die->die_child;
  /* First pass: unlink every marked base type (die_mark != 0) from
     the CU's circular child list.  */
  do
    {
      dw_die_ref prev = c;
      c = c->die_sib;
      while (c->die_mark)
	{
	  remove_child_with_prev (child: c, prev);
	  /* As base types got marked, there must be at least
	     one node other than DW_TAG_base_type.  */
	  gcc_assert (die->die_child != NULL);
	  c = prev->die_sib;
	}
    }
  while (c != die->die_child);
  gcc_assert (die->die_child);
  c = die->die_child;
  /* Second pass: re-insert the sorted base types right after
     die_child (the front of the sibling ring), clearing the
     use-count marks as we go.  */
  for (i = 0; base_types.iterate (ix: i, ptr: &base_type); i++)
    {
      base_type->die_mark = 0;
      base_type->die_sib = c->die_sib;
      c->die_sib = base_type;
      c = base_type;
    }
}
30731 | |
/* Helper function for resolve_addr, attempt to resolve
   one CONST_STRING, return true if successful.  Similarly verify that
   SYMBOL_REFs refer to variables emitted in the current CU.  */

static bool
resolve_one_addr (rtx *addr)
{
  rtx rtl = *addr;

  if (GET_CODE (rtl) == CONST_STRING)
    {
      /* Replace the CONST_STRING with the address of its constant pool
	 entry, provided the entry was emitted; record the new rtx in
	 used_rtx_array.  */
      size_t len = strlen (XSTR (rtl, 0)) + 1;
      tree t = build_string (len, XSTR (rtl, 0));
      tree tlen = size_int (len - 1);
      TREE_TYPE (t)
	= build_array_type (char_type_node, build_index_type (tlen));
      rtl = lookup_constant_def (t);
      if (!rtl || !MEM_P (rtl))
	return false;
      rtl = XEXP (rtl, 0);
      if (GET_CODE (rtl) == SYMBOL_REF
	  && SYMBOL_REF_DECL (rtl)
	  && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
	return false;
      vec_safe_push (v&: used_rtx_array, obj: rtl);
      *addr = rtl;
      return true;
    }

  if (GET_CODE (rtl) == SYMBOL_REF
      && SYMBOL_REF_DECL (rtl))
    {
      if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
	{
	  /* Reject references to constant pool entries whose
	     initializer was never written out.  */
	  if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
	    return false;
	}
      else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
	return false;
    }

  if (GET_CODE (rtl) == CONST)
    {
      /* A CONST is valid only if every sub-expression resolves.  */
      subrtx_ptr_iterator::array_type array;
      FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
	if (!resolve_one_addr (addr: *iter))
	  return false;
    }

  return true;
}
30783 | |
/* For STRING_CST, return SYMBOL_REF of its constant pool entry,
   if possible, and create DW_TAG_dwarf_procedure that can be referenced
   from DW_OP_implicit_pointer if the string hasn't been seen yet.  */

static rtx
string_cst_pool_decl (tree t)
{
  rtx rtl = output_constant_def (t, 1);
  unsigned char *array;
  dw_loc_descr_ref l;
  tree decl;
  size_t len;
  dw_die_ref ref;

  if (!rtl || !MEM_P (rtl))
    return NULL_RTX;
  rtl = XEXP (rtl, 0);
  if (GET_CODE (rtl) != SYMBOL_REF
      || SYMBOL_REF_DECL (rtl) == NULL_TREE)
    return NULL_RTX;

  decl = SYMBOL_REF_DECL (rtl);
  if (!lookup_decl_die (decl))
    {
      /* First time we see this string: build a DW_TAG_dwarf_procedure
	 whose DW_AT_location is a DW_OP_implicit_value holding a copy
	 of the string bytes, and associate it with DECL so subsequent
	 lookups find it.  */
      len = TREE_STRING_LENGTH (t);
      vec_safe_push (v&: used_rtx_array, obj: rtl);
      ref = new_die (tag_value: DW_TAG_dwarf_procedure, parent_die: comp_unit_die (), t: decl);
      array = ggc_vec_alloc<unsigned char> (c: len);
      memcpy (dest: array, TREE_STRING_POINTER (t), n: len);
      l = new_loc_descr (op: DW_OP_implicit_value, oprnd1: len, oprnd2: 0);
      l->dw_loc_oprnd2.val_class = dw_val_class_vec;
      l->dw_loc_oprnd2.v.val_vec.length = len;
      l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
      l->dw_loc_oprnd2.v.val_vec.array = array;
      add_AT_loc (die: ref, attr_kind: DW_AT_location, loc: l);
      equate_decl_number_to_die (decl, decl_die: ref);
    }
  return rtl;
}
30823 | |
/* Helper function of resolve_addr_in_expr.  LOC is
   a DW_OP_addr followed by DW_OP_stack_value, either at the start
   of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
   resolved.  Replace it (both DW_OP_addr and DW_OP_stack_value)
   with DW_OP_implicit_pointer if possible and return true;
   if unsuccessful, return false.  */
30830 | |
static bool
optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
{
  rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
  HOST_WIDE_INT offset = 0;
  dw_die_ref ref = NULL;
  tree decl;

  /* Peel off (const (plus SYM CST)) so the constant becomes the
     DW_OP_implicit_pointer byte offset.  */
  if (GET_CODE (rtl) == CONST
      && GET_CODE (XEXP (rtl, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
    {
      offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
      rtl = XEXP (XEXP (rtl, 0), 0);
    }
  /* A bare CONST_STRING: build an equivalent STRING_CST tree and force
     it into the constant pool so there is a decl (and DIE) to refer to.  */
  if (GET_CODE (rtl) == CONST_STRING)
    {
      size_t len = strlen (XSTR (rtl, 0)) + 1;
      tree t = build_string (len, XSTR (rtl, 0));
      tree tlen = size_int (len - 1);

      TREE_TYPE (t)
	= build_array_type (char_type_node, build_index_type (tlen));
      rtl = string_cst_pool_decl (t);
      if (!rtl)
	return false;
    }
  if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
    {
      decl = SYMBOL_REF_DECL (rtl);
      /* Only non-external variables can have a DIE in this CU that
	 DW_OP_implicit_pointer may reference.  */
      if (VAR_P (decl) && !DECL_EXTERNAL (decl))
	{
	  ref = lookup_decl_die (decl);
	  /* The target DIE must carry a location or constant value for the
	     implicit pointer to be meaningful to a consumer.  */
	  if (ref && (get_AT (die: ref, attr_kind: DW_AT_location)
		      || get_AT (die: ref, attr_kind: DW_AT_const_value)))
	    {
	      /* Rewrite LOC in place into DW_OP_implicit_pointer <ref, offset>
		 and splice out the following DW_OP_stack_value (the caller
		 guarantees it is the next op).  */
	      loc->dw_loc_opc = dwarf_OP (op: DW_OP_implicit_pointer);
	      loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
	      loc->dw_loc_oprnd1.val_entry = NULL;
	      loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
	      loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
	      loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
	      loc->dw_loc_oprnd2.v.val_int = offset;
	      return true;
	    }
	}
    }
  return false;
}
30880 | |
30881 | /* Helper function for resolve_addr, handle one location |
30882 | expression, return false if at least one CONST_STRING or SYMBOL_REF in |
30883 | the location list couldn't be resolved. */ |
30884 | |
static bool
resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
{
  /* KEEP marks a DW_OP_convert that must not be merged away because the
     preceding typed stack entry is non-integer; see the convert cases.  */
  dw_loc_descr_ref keep = NULL;
  for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
    switch (loc->dw_loc_opc)
      {
      case DW_OP_addr:
	if (!resolve_one_addr (addr: &loc->dw_loc_oprnd1.v.val_addr))
	  {
	    /* Address couldn't be resolved.  As a fallback, if this is
	       DW_OP_addr ... DW_OP_stack_value at the start of the
	       expression or of a piece, try to rewrite it as
	       DW_OP_implicit_pointer (not usable in strict pre-DWARF 5).  */
	    if ((prev == NULL
		 || prev->dw_loc_opc == DW_OP_piece
		 || prev->dw_loc_opc == DW_OP_bit_piece)
		&& loc->dw_loc_next
		&& loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
		&& (!dwarf_strict || dwarf_version >= 5)
		&& optimize_one_addr_into_implicit_ptr (loc))
	      break;
	    return false;
	  }
	break;
      case DW_OP_GNU_addr_index:
      case DW_OP_addrx:
      case DW_OP_GNU_const_index:
      case DW_OP_constx:
	/* addrx always holds an address; const_index/constx only when it
	   is a dtprel (TLS-relative) value.  Re-resolve the underlying RTL
	   and replace the address table entry with the resolved one.  */
	if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
	     || loc->dw_loc_opc == DW_OP_addrx)
	    || ((loc->dw_loc_opc == DW_OP_GNU_const_index
		 || loc->dw_loc_opc == DW_OP_constx)
		&& loc->dtprel))
	  {
	    rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
	    if (!resolve_one_addr (addr: &rtl))
	      return false;
	    remove_addr_table_entry (entry: loc->dw_loc_oprnd1.val_entry);
	    loc->dw_loc_oprnd1.val_entry
	      = add_addr_table_entry (addr: rtl, kind: ate_kind_rtx);
	  }
	break;
      case DW_OP_const4u:
      case DW_OP_const8u:
	/* These carry an address only in the dtprel (TLS) case.  */
	if (loc->dtprel
	    && !resolve_one_addr (addr: &loc->dw_loc_oprnd1.v.val_addr))
	  return false;
	break;
      case DW_OP_plus_uconst:
	/* Shrink DW_OP_plus_uconst <big uleb> into the shorter
	   "push constant; DW_OP_plus" form when that encoding is smaller.  */
	if (size_of_loc_descr (loc)
	    > size_of_int_loc_descriptor (i: loc->dw_loc_oprnd1.v.val_unsigned)
	      + 1
	    && loc->dw_loc_oprnd1.v.val_unsigned > 0)
	  {
	    dw_loc_descr_ref repl
	      = int_loc_descriptor (poly_i: loc->dw_loc_oprnd1.v.val_unsigned);
	    add_loc_descr (list_head: &repl, descr: new_loc_descr (op: DW_OP_plus, oprnd1: 0, oprnd2: 0));
	    add_loc_descr (list_head: &repl, descr: loc->dw_loc_next);
	    /* Overwrite LOC with the head of the replacement so list links
	       from PREV stay valid.  */
	    *loc = *repl;
	  }
	break;
      case DW_OP_implicit_value:
	if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
	    && !resolve_one_addr (addr: &loc->dw_loc_oprnd2.v.val_addr))
	  return false;
	break;
      case DW_OP_implicit_pointer:
      case DW_OP_GNU_implicit_pointer:
      case DW_OP_GNU_parameter_ref:
      case DW_OP_GNU_variable_value:
	/* Late-resolve a decl reference into a DIE reference; fail if the
	   decl never got a DIE.  */
	if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
	  {
	    dw_die_ref ref
	      = lookup_decl_die (decl: loc->dw_loc_oprnd1.v.val_decl_ref);
	    if (ref == NULL)
	      return false;
	    loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
	    loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
	    loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
	  }
	if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
	  {
	    if (prev == NULL
		&& loc->dw_loc_next == NULL
		&& AT_class (a) == dw_val_class_loc)
	      switch (a->dw_attr)
		{
		/* Following attributes allow both exprloc and reference,
		   so if the whole expression is DW_OP_GNU_variable_value
		   alone we could transform it into reference.  */
		case DW_AT_byte_size:
		case DW_AT_bit_size:
		case DW_AT_lower_bound:
		case DW_AT_upper_bound:
		case DW_AT_bit_stride:
		case DW_AT_count:
		case DW_AT_allocated:
		case DW_AT_associated:
		case DW_AT_byte_stride:
		  a->dw_attr_val.val_class = dw_val_class_die_ref;
		  a->dw_attr_val.val_entry = NULL;
		  a->dw_attr_val.v.val_die_ref.die
		    = loc->dw_loc_oprnd1.v.val_die_ref.die;
		  a->dw_attr_val.v.val_die_ref.external = 0;
		  return true;
		default:
		  break;
		}
	    /* DW_OP_GNU_variable_value itself is a GNU extension; reject
	       it under -gstrict-dwarf.  */
	    if (dwarf_strict)
	      return false;
	  }
	break;
      case DW_OP_const_type:
      case DW_OP_regval_type:
      case DW_OP_deref_type:
      case DW_OP_convert:
      case DW_OP_reinterpret:
      case DW_OP_GNU_const_type:
      case DW_OP_GNU_regval_type:
      case DW_OP_GNU_deref_type:
      case DW_OP_GNU_convert:
      case DW_OP_GNU_reinterpret:
	/* Collapse chains of typed-op + DW_OP_convert when both base types
	   are same-sized integers (or identical encodings): the conversion
	   is then a no-op and the next op can be removed.  */
	while (loc->dw_loc_next
	       && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
		   || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
	  {
	    dw_die_ref base1, base2;
	    unsigned enc1, enc2, size1, size2;
	    /* For regval/deref_type the base type DIE is in operand 2,
	       otherwise in operand 1 (unless it's still an unsigned
	       constant, which we can't reason about).  */
	    if (loc->dw_loc_opc == DW_OP_regval_type
		|| loc->dw_loc_opc == DW_OP_deref_type
		|| loc->dw_loc_opc == DW_OP_GNU_regval_type
		|| loc->dw_loc_opc == DW_OP_GNU_deref_type)
	      base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
	    else if (loc->dw_loc_oprnd1.val_class
		     == dw_val_class_unsigned_const)
	      break;
	    else
	      base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
	    if (loc->dw_loc_next->dw_loc_oprnd1.val_class
		== dw_val_class_unsigned_const)
	      break;
	    base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
	    gcc_assert (base1->die_tag == DW_TAG_base_type
			&& base2->die_tag == DW_TAG_base_type);
	    enc1 = get_AT_unsigned (die: base1, attr_kind: DW_AT_encoding);
	    enc2 = get_AT_unsigned (die: base2, attr_kind: DW_AT_encoding);
	    size1 = get_AT_unsigned (die: base1, attr_kind: DW_AT_byte_size);
	    size2 = get_AT_unsigned (die: base2, attr_kind: DW_AT_byte_size);
	    if (size1 == size2
		&& (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
		     && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
		     && loc != keep)
		    || enc1 == enc2))
	      {
		/* Optimize away next DW_OP_convert after
		   adjusting LOC's base type die reference.  */
		if (loc->dw_loc_opc == DW_OP_regval_type
		    || loc->dw_loc_opc == DW_OP_deref_type
		    || loc->dw_loc_opc == DW_OP_GNU_regval_type
		    || loc->dw_loc_opc == DW_OP_GNU_deref_type)
		  loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
		else
		  loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
		loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
		continue;
	      }
	    /* Don't change integer DW_OP_convert after e.g. floating
	       point typed stack entry.  */
	    else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
	      keep = loc->dw_loc_next;
	    break;
	  }
	break;
      default:
	break;
      }
  return true;
}
31060 | |
31061 | /* Helper function of resolve_addr. DIE had DW_AT_location of |
31062 | DW_OP_addr alone, which referred to DECL in DW_OP_addr's operand |
31063 | and DW_OP_addr couldn't be resolved. resolve_addr has already |
31064 | removed the DW_AT_location attribute. This function attempts to |
31065 | add a new DW_AT_location attribute with DW_OP_implicit_pointer |
31066 | to it or DW_AT_const_value attribute, if possible. */ |
31067 | |
static void
optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
{
  /* Bail out unless DECL is this DIE's static, non-external variable with
     a usable (non-decl) initializer and no DW_AT_const_value yet.  */
  if (!VAR_P (decl)
      || lookup_decl_die (decl) != die
      || DECL_EXTERNAL (decl)
      || !TREE_STATIC (decl)
      || DECL_INITIAL (decl) == NULL_TREE
      || DECL_P (DECL_INITIAL (decl))
      || get_AT (die, attr_kind: DW_AT_const_value))
    return;

  tree init = DECL_INITIAL (decl);
  HOST_WIDE_INT offset = 0;
  /* For variables that have been optimized away and thus
     don't have a memory location, see if we can emit
     DW_AT_const_value instead.  */
  if (tree_add_const_value_attribute (die, t: init))
    return;
  /* DW_OP_implicit_pointer below is a DWARF 5 feature (GNU extension
     before that), so it's off limits under strict pre-5 DWARF.  */
  if (dwarf_strict && dwarf_version < 5)
    return;
  /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
     and ADDR_EXPR refers to a decl that has DW_AT_location or
     DW_AT_const_value (but isn't addressable, otherwise
     resolving the original DW_OP_addr wouldn't fail), see if
     we can add DW_OP_implicit_pointer.  */
  STRIP_NOPS (init);
  if (TREE_CODE (init) == POINTER_PLUS_EXPR
      && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
    {
      /* Fold the constant displacement into the implicit pointer offset.  */
      offset = tree_to_shwi (TREE_OPERAND (init, 1));
      init = TREE_OPERAND (init, 0);
      STRIP_NOPS (init);
    }
  if (TREE_CODE (init) != ADDR_EXPR)
    return;
  /* The address must be of an unemitted string literal, or of another
     non-external variable (not DECL itself).  */
  if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
       && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
      || (VAR_P (TREE_OPERAND (init, 0))
	  && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
	  && TREE_OPERAND (init, 0) != decl))
    {
      dw_die_ref ref;
      dw_loc_descr_ref l;

      if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
	{
	  /* Materialize a DW_TAG_dwarf_procedure DIE for the string so
	     there is something to point at.  */
	  rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
	  if (!rtl)
	    return;
	  decl = SYMBOL_REF_DECL (rtl);
	}
      else
	decl = TREE_OPERAND (init, 0);
      ref = lookup_decl_die (decl);
      /* The target DIE must exist and carry a location or constant
	 value for an implicit pointer to mean anything.  */
      if (ref == NULL
	  || (!get_AT (die: ref, attr_kind: DW_AT_location)
	      && !get_AT (die: ref, attr_kind: DW_AT_const_value)))
	return;
      l = new_loc_descr (op: dwarf_OP (op: DW_OP_implicit_pointer), oprnd1: 0, oprnd2: offset);
      l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
      l->dw_loc_oprnd1.v.val_die_ref.die = ref;
      l->dw_loc_oprnd1.v.val_die_ref.external = 0;
      add_AT_loc (die, attr_kind: DW_AT_location, loc: l);
    }
}
31134 | |
/* Return NULL if L is a valid DWARF expression; otherwise return the
   first op in L that is not a valid DWARF expression op.  */
31137 | |
31138 | static dw_loc_descr_ref |
31139 | non_dwarf_expression (dw_loc_descr_ref l) |
31140 | { |
31141 | while (l) |
31142 | { |
31143 | if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31) |
31144 | return l; |
31145 | switch (l->dw_loc_opc) |
31146 | { |
31147 | case DW_OP_regx: |
31148 | case DW_OP_implicit_value: |
31149 | case DW_OP_stack_value: |
31150 | case DW_OP_implicit_pointer: |
31151 | case DW_OP_GNU_implicit_pointer: |
31152 | case DW_OP_GNU_parameter_ref: |
31153 | case DW_OP_piece: |
31154 | case DW_OP_bit_piece: |
31155 | return l; |
31156 | default: |
31157 | break; |
31158 | } |
31159 | l = l->dw_loc_next; |
31160 | } |
31161 | return NULL; |
31162 | } |
31163 | |
31164 | /* Return adjusted copy of EXPR: |
31165 | If it is empty DWARF expression, return it. |
31166 | If it is valid non-empty DWARF expression, |
31167 | return copy of EXPR with DW_OP_deref appended to it. |
31168 | If it is DWARF expression followed by DW_OP_reg{N,x}, return |
31169 | copy of the DWARF expression with DW_OP_breg{N,x} <0> appended. |
31170 | If it is DWARF expression followed by DW_OP_stack_value, return |
31171 | copy of the DWARF expression without anything appended. |
31172 | Otherwise, return NULL. */ |
31173 | |
static dw_loc_descr_ref
copy_deref_exprloc (dw_loc_descr_ref expr)
{
  dw_loc_descr_ref tail = NULL;

  if (expr == NULL)
    return NULL;

  /* L is the first non-expression op; if anything follows it, the
     location is too complex to adjust here.  */
  dw_loc_descr_ref l = non_dwarf_expression (l: expr);
  if (l && l->dw_loc_next)
    return NULL;

  if (l)
    {
      /* Terminating DW_OP_regN: the value lives in a register, so append
	 DW_OP_bregN <0> to push the register's contents instead.  */
      if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
	tail = new_loc_descr (op: (enum dwarf_location_atom)
			      (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
			      oprnd1: 0, oprnd2: 0);
      else
	switch (l->dw_loc_opc)
	  {
	  case DW_OP_regx:
	    tail = new_loc_descr (op: DW_OP_bregx,
				  oprnd1: l->dw_loc_oprnd1.v.val_unsigned, oprnd2: 0);
	    break;
	  case DW_OP_stack_value:
	    /* Value is already on the expression stack; nothing to append.  */
	    break;
	  default:
	    return NULL;
	  }
    }
  else
    /* A pure DWARF expression computes an address: dereference it.  */
    tail = new_loc_descr (op: DW_OP_deref, oprnd1: 0, oprnd2: 0);

  /* Deep-copy the ops up to (excluding) L, then attach TAIL.  */
  dw_loc_descr_ref ret = NULL, *p = &ret;
  while (expr != l)
    {
      *p = new_loc_descr (op: expr->dw_loc_opc, oprnd1: 0, oprnd2: 0);
      (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
      (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
      p = &(*p)->dw_loc_next;
      expr = expr->dw_loc_next;
    }
  *p = tail;
  return ret;
}
31220 | |
31221 | /* For DW_AT_string_length attribute with DW_OP_GNU_variable_value |
31222 | reference to a variable or argument, adjust it if needed and return: |
31223 | -1 if the DW_AT_string_length attribute and DW_AT_{string_length_,}byte_size |
31224 | attribute if present should be removed |
31225 | 0 keep the attribute perhaps with minor modifications, no need to rescan |
31226 | 1 if the attribute has been successfully adjusted. */ |
31227 | |
static int
optimize_string_length (dw_attr_node *a)
{
  dw_loc_descr_ref l = AT_loc (a), lv;
  dw_die_ref die;
  /* First turn a decl reference in operand 1 into a DIE reference;
     if the decl never got a DIE, the attribute must be dropped.  */
  if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
    {
      tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
      die = lookup_decl_die (decl);
      if (die)
	{
	  l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
	  l->dw_loc_oprnd1.v.val_die_ref.die = die;
	  l->dw_loc_oprnd1.v.val_die_ref.external = 0;
	}
      else
	return -1;
    }
  else
    die = l->dw_loc_oprnd1.v.val_die_ref.die;

  /* DWARF5 allows reference class, so we can then reference the DIE.
     Only do this for DW_OP_GNU_variable_value DW_OP_stack_value.  */
  if (l->dw_loc_next != NULL && dwarf_version >= 5)
    {
      a->dw_attr_val.val_class = dw_val_class_die_ref;
      a->dw_attr_val.val_entry = NULL;
      a->dw_attr_val.v.val_die_ref.die = die;
      a->dw_attr_val.v.val_die_ref.external = 0;
      return 0;
    }

  /* Inspect the referenced DIE's DW_AT_location to decide which rewrite
     is possible.  NON_DWARF_EXPR is set when any of its expressions
     contains non-expression ops (registers, pieces, implicit values).  */
  dw_attr_node *av = get_AT (die, attr_kind: DW_AT_location);
  dw_loc_list_ref d;
  bool non_dwarf_expr = false;

  if (av == NULL)
    return dwarf_strict ? -1 : 0;
  switch (AT_class (a: av))
    {
    case dw_val_class_loc_list:
      for (d = AT_loc_list (a: av); d != NULL; d = d->dw_loc_next)
	if (d->expr && non_dwarf_expression (l: d->expr))
	  non_dwarf_expr = true;
      break;
    case dw_val_class_view_list:
      gcc_unreachable ();
    case dw_val_class_loc:
      lv = AT_loc (a: av);
      if (lv == NULL)
	return dwarf_strict ? -1 : 0;
      if (non_dwarf_expression (l: lv))
	non_dwarf_expr = true;
      break;
    default:
      return dwarf_strict ? -1 : 0;
    }

  /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
     into DW_OP_call4 or DW_OP_GNU_variable_value into
     DW_OP_call4 DW_OP_deref, do so.  */
  if (!non_dwarf_expr
      && (l->dw_loc_next != NULL || AT_class (a: av) == dw_val_class_loc))
    {
      l->dw_loc_opc = DW_OP_call4;
      if (l->dw_loc_next)
	l->dw_loc_next = NULL;
      else
	l->dw_loc_next = new_loc_descr (op: DW_OP_deref, oprnd1: 0, oprnd2: 0);
      return 0;
    }

  /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
     copy over the DW_AT_location attribute from die to a.  */
  if (l->dw_loc_next != NULL)
    {
      a->dw_attr_val = av->dw_attr_val;
      return 1;
    }

  /* Otherwise build an adjusted (dereferenced) copy of each expression
     via copy_deref_exprloc and install it as A's new value.  */
  dw_loc_list_ref list, *p;
  switch (AT_class (a: av))
    {
    case dw_val_class_loc_list:
      p = &list;
      list = NULL;
      for (d = AT_loc_list (a: av); d != NULL; d = d->dw_loc_next)
	{
	  lv = copy_deref_exprloc (expr: d->expr);
	  if (lv)
	    {
	      *p = new_loc_list (expr: lv, begin: d->begin, vbegin: d->vbegin, end: d->end, vend: d->vend, section: d->section);
	      p = &(*p)->dw_loc_next;
	    }
	  else if (!dwarf_strict && d->expr)
	    /* Keep the attribute unchanged when a range can't be adjusted
	       and GNU extensions are allowed.  */
	    return 0;
	}
      if (list == NULL)
	return dwarf_strict ? -1 : 0;
      a->dw_attr_val.val_class = dw_val_class_loc_list;
      gen_llsym (list);
      *AT_loc_list_ptr (a) = list;
      return 1;
    case dw_val_class_loc:
      lv = copy_deref_exprloc (expr: AT_loc (a: av));
      if (lv == NULL)
	return dwarf_strict ? -1 : 0;
      a->dw_attr_val.v.val_loc = lv;
      return 1;
    default:
      gcc_unreachable ();
    }
}
31341 | |
31342 | /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to |
31343 | an address in .rodata section if the string literal is emitted there, |
31344 | or remove the containing location list or replace DW_AT_const_value |
31345 | with DW_AT_location and empty location expression, if it isn't found |
31346 | in .rodata. Similarly for SYMBOL_REFs, keep only those that refer |
31347 | to something that has been emitted in the current CU. */ |
31348 | |
static void
resolve_addr (dw_die_ref die)
{
  dw_die_ref c;
  dw_attr_node *a;
  dw_loc_list_ref *curr, *start, loc;
  unsigned ix;
  bool remove_AT_byte_size = false;

  /* NOTE: several branches below do "remove_AT (...); ix--;" — removing
     shifts the attribute vector down, so the decrement makes the loop
     revisit the same index on the next iteration.  */
  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    switch (AT_class (a))
      {
      case dw_val_class_loc_list:
	start = curr = AT_loc_list_ptr (a);
	loc = *curr;
	gcc_assert (loc);
	/* The same list can be referenced more than once.  See if we have
	   already recorded the result from a previous pass.  */
	if (loc->replaced)
	  *curr = loc->dw_loc_next;
	else if (!loc->resolved_addr)
	  {
	    /* As things stand, we do not expect or allow one die to
	       reference a suffix of another die's location list chain.
	       References must be identical or completely separate.
	       There is therefore no need to cache the result of this
	       pass on any list other than the first; doing so
	       would lead to unnecessary writes.  */
	    while (*curr)
	      {
		gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
		if (!resolve_addr_in_expr (a, loc: (*curr)->expr))
		  {
		    /* Drop this range from the list; pass its label(s)
		       on to the next range so references stay valid.  */
		    dw_loc_list_ref next = (*curr)->dw_loc_next;
		    dw_loc_descr_ref l = (*curr)->expr;

		    if (next && (*curr)->ll_symbol)
		      {
			gcc_assert (!next->ll_symbol);
			next->ll_symbol = (*curr)->ll_symbol;
			next->vl_symbol = (*curr)->vl_symbol;
		      }
		    if (dwarf_split_debug_info)
		      remove_loc_list_addr_table_entries (descr: l);
		    *curr = next;
		  }
		else
		  {
		    mark_base_types (loc: (*curr)->expr);
		    curr = &(*curr)->dw_loc_next;
		  }
	      }
	    /* Record the outcome on the (original) head node so a later
	       reference to the same list reuses it.  */
	    if (loc == *start)
	      loc->resolved_addr = 1;
	    else
	      {
		loc->replaced = 1;
		loc->dw_loc_next = *start;
	      }
	  }
	/* All ranges were dropped: remove the attribute entirely.  */
	if (!*start)
	  {
	    remove_AT (die, attr_kind: a->dw_attr);
	    ix--;
	  }
	break;
      case dw_val_class_view_list:
	{
	  gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
	  gcc_checking_assert (dwarf2out_locviews_in_attribute ());
	  dw_val_node *llnode
	    = view_list_to_loc_list_val_node (val: &a->dw_attr_val);
	  /* If we no longer have a loclist, or it no longer needs
	     views, drop this attribute.  */
	  if (!llnode || !llnode->v.val_loc_list->vl_symbol)
	    {
	      remove_AT (die, attr_kind: a->dw_attr);
	      ix--;
	    }
	  break;
	}
      case dw_val_class_loc:
	{
	  dw_loc_descr_ref l = AT_loc (a);
	  /* DW_OP_GNU_variable_value DW_OP_stack_value or
	     DW_OP_GNU_variable_value in DW_AT_string_length can be converted
	     into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
	     DWARF4 unlike DW_OP_GNU_variable_value.  Or for DWARF5
	     DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
	     with DW_FORM_ref referencing the same DIE as
	     DW_OP_GNU_variable_value used to reference.  */
	  if (a->dw_attr == DW_AT_string_length
	      && l
	      && l->dw_loc_opc == DW_OP_GNU_variable_value
	      && (l->dw_loc_next == NULL
		  || (l->dw_loc_next->dw_loc_next == NULL
		      && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
	    {
	      switch (optimize_string_length (a))
		{
		case -1:
		  remove_AT (die, attr_kind: a->dw_attr);
		  ix--;
		  /* If we drop DW_AT_string_length, we need to drop also
		     DW_AT_{string_length_,}byte_size.  */
		  remove_AT_byte_size = true;
		  continue;
		default:
		  break;
		case 1:
		  /* Even if we keep the optimized DW_AT_string_length,
		     it might have changed AT_class, so process it again.  */
		  ix--;
		  continue;
		}
	    }
	  /* For -gdwarf-2 don't attempt to optimize
	     DW_AT_data_member_location containing
	     DW_OP_plus_uconst - older consumers might
	     rely on it being that op instead of a more complex,
	     but shorter, location description.  */
	  if ((dwarf_version > 2
	       || a->dw_attr != DW_AT_data_member_location
	       || l == NULL
	       || l->dw_loc_opc != DW_OP_plus_uconst
	       || l->dw_loc_next != NULL)
	      && !resolve_addr_in_expr (a, loc: l))
	    {
	      if (dwarf_split_debug_info)
		remove_loc_list_addr_table_entries (descr: l);
	      /* A lone unresolvable DW_OP_addr of a decl's symbol in
		 DW_AT_location: drop the attribute, then try to synthesize
		 an implicit pointer or constant value instead.  */
	      if (l != NULL
		  && l->dw_loc_next == NULL
		  && l->dw_loc_opc == DW_OP_addr
		  && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
		  && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
		  && a->dw_attr == DW_AT_location)
		{
		  tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
		  remove_AT (die, attr_kind: a->dw_attr);
		  ix--;
		  optimize_location_into_implicit_ptr (die, decl);
		  break;
		}
	      if (a->dw_attr == DW_AT_string_length)
		/* If we drop DW_AT_string_length, we need to drop also
		   DW_AT_{string_length_,}byte_size.  */
		remove_AT_byte_size = true;
	      remove_AT (die, attr_kind: a->dw_attr);
	      ix--;
	    }
	  else
	    mark_base_types (loc: l);
	}
	break;
      case dw_val_class_addr:
	if (a->dw_attr == DW_AT_const_value
	    && !resolve_one_addr (addr: &a->dw_attr_val.v.val_addr))
	  {
	    if (AT_index (a) != NOT_INDEXED)
	      remove_addr_table_entry (entry: a->dw_attr_val.val_entry);
	    remove_AT (die, attr_kind: a->dw_attr);
	    ix--;
	  }
	/* Call-site origin attributes hold the callee's SYMBOL_REF;
	   replace them with a DIE reference to the callee.  */
	if ((die->die_tag == DW_TAG_call_site
	     && a->dw_attr == DW_AT_call_origin)
	    || (die->die_tag == DW_TAG_GNU_call_site
		&& a->dw_attr == DW_AT_abstract_origin))
	  {
	    tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
	    dw_die_ref tdie = lookup_decl_die (decl: tdecl);
	    dw_die_ref cdie;
	    if (tdie == NULL
		&& DECL_EXTERNAL (tdecl)
		&& DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
		&& (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
	      {
		dw_die_ref pdie = cdie;
		/* Make sure we don't add these DIEs into type units.
		   We could emit skeleton DIEs for context (namespaces,
		   outer structs/classes) and a skeleton DIE for the
		   innermost context with DW_AT_signature pointing to the
		   type unit.  See PR78835.  */
		while (pdie && pdie->die_tag != DW_TAG_type_unit)
		  pdie = pdie->die_parent;
		if (pdie == NULL)
		  {
		    /* Creating a full DIE for tdecl is overly expensive and
		       at this point even wrong when in the LTO phase
		       as it can end up generating new type DIEs we didn't
		       output and thus optimize_external_refs will crash.  */
		    tdie = new_die (tag_value: DW_TAG_subprogram, parent_die: cdie, NULL_TREE);
		    add_AT_flag (die: tdie, attr_kind: DW_AT_external, flag: 1);
		    add_AT_flag (die: tdie, attr_kind: DW_AT_declaration, flag: 1);
		    add_linkage_attr (die: tdie, decl: tdecl);
		    add_name_and_src_coords_attributes (die: tdie, decl: tdecl, no_linkage_name: true);
		    equate_decl_number_to_die (decl: tdecl, decl_die: tdie);
		  }
	      }
	    if (tdie)
	      {
		a->dw_attr_val.val_class = dw_val_class_die_ref;
		a->dw_attr_val.v.val_die_ref.die = tdie;
		a->dw_attr_val.v.val_die_ref.external = 0;
	      }
	    else
	      {
		/* No callee DIE is available; drop the attribute.  */
		if (AT_index (a) != NOT_INDEXED)
		  remove_addr_table_entry (entry: a->dw_attr_val.val_entry);
		remove_AT (die, attr_kind: a->dw_attr);
		ix--;
	      }
	  }
	break;
      default:
	break;
      }

  /* Deferred from the loop above: DW_AT_string_length was removed, so its
     companion byte-size attribute must go too (its name depends on the
     DWARF version).  */
  if (remove_AT_byte_size)
    remove_AT (die, dwarf_version >= 5
		    ? DW_AT_string_length_byte_size
		    : DW_AT_byte_size);

  /* Recurse over the whole DIE tree.  */
  FOR_EACH_CHILD (die, c, resolve_addr (c));
}
31573 | |
31574 | /* Helper routines for optimize_location_lists. |
31575 | This pass tries to share identical local lists in .debug_loc |
31576 | section. */ |
31577 | |
31578 | /* Iteratively hash operands of LOC opcode into HSTATE. */ |
31579 | |
31580 | static void |
31581 | hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate) |
31582 | { |
31583 | dw_val_ref val1 = &loc->dw_loc_oprnd1; |
31584 | dw_val_ref val2 = &loc->dw_loc_oprnd2; |
31585 | |
31586 | switch (loc->dw_loc_opc) |
31587 | { |
31588 | case DW_OP_const4u: |
31589 | case DW_OP_const8u: |
31590 | if (loc->dtprel) |
31591 | goto hash_addr; |
31592 | /* FALLTHRU */ |
31593 | case DW_OP_const1u: |
31594 | case DW_OP_const1s: |
31595 | case DW_OP_const2u: |
31596 | case DW_OP_const2s: |
31597 | case DW_OP_const4s: |
31598 | case DW_OP_const8s: |
31599 | case DW_OP_constu: |
31600 | case DW_OP_consts: |
31601 | case DW_OP_pick: |
31602 | case DW_OP_plus_uconst: |
31603 | case DW_OP_breg0: |
31604 | case DW_OP_breg1: |
31605 | case DW_OP_breg2: |
31606 | case DW_OP_breg3: |
31607 | case DW_OP_breg4: |
31608 | case DW_OP_breg5: |
31609 | case DW_OP_breg6: |
31610 | case DW_OP_breg7: |
31611 | case DW_OP_breg8: |
31612 | case DW_OP_breg9: |
31613 | case DW_OP_breg10: |
31614 | case DW_OP_breg11: |
31615 | case DW_OP_breg12: |
31616 | case DW_OP_breg13: |
31617 | case DW_OP_breg14: |
31618 | case DW_OP_breg15: |
31619 | case DW_OP_breg16: |
31620 | case DW_OP_breg17: |
31621 | case DW_OP_breg18: |
31622 | case DW_OP_breg19: |
31623 | case DW_OP_breg20: |
31624 | case DW_OP_breg21: |
31625 | case DW_OP_breg22: |
31626 | case DW_OP_breg23: |
31627 | case DW_OP_breg24: |
31628 | case DW_OP_breg25: |
31629 | case DW_OP_breg26: |
31630 | case DW_OP_breg27: |
31631 | case DW_OP_breg28: |
31632 | case DW_OP_breg29: |
31633 | case DW_OP_breg30: |
31634 | case DW_OP_breg31: |
31635 | case DW_OP_regx: |
31636 | case DW_OP_fbreg: |
31637 | case DW_OP_piece: |
31638 | case DW_OP_deref_size: |
31639 | case DW_OP_xderef_size: |
31640 | hstate.add_object (obj&: val1->v.val_int); |
31641 | break; |
31642 | case DW_OP_skip: |
31643 | case DW_OP_bra: |
31644 | { |
31645 | int offset; |
31646 | |
31647 | gcc_assert (val1->val_class == dw_val_class_loc); |
31648 | offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3); |
31649 | hstate.add_object (obj&: offset); |
31650 | } |
31651 | break; |
31652 | case DW_OP_implicit_value: |
31653 | hstate.add_object (obj&: val1->v.val_unsigned); |
31654 | switch (val2->val_class) |
31655 | { |
31656 | case dw_val_class_const: |
31657 | hstate.add_object (obj&: val2->v.val_int); |
31658 | break; |
31659 | case dw_val_class_vec: |
31660 | { |
31661 | unsigned int elt_size = val2->v.val_vec.elt_size; |
31662 | unsigned int len = val2->v.val_vec.length; |
31663 | |
31664 | hstate.add_int (v: elt_size); |
31665 | hstate.add_int (v: len); |
31666 | hstate.add (data: val2->v.val_vec.array, len: len * elt_size); |
31667 | } |
31668 | break; |
31669 | case dw_val_class_const_double: |
31670 | hstate.add_object (obj&: val2->v.val_double.low); |
31671 | hstate.add_object (obj&: val2->v.val_double.high); |
31672 | break; |
31673 | case dw_val_class_wide_int: |
31674 | hstate.add (data: val2->v.val_wide->get_val (), |
31675 | len: get_full_len (op: *val2->v.val_wide) |
31676 | * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR); |
31677 | break; |
31678 | case dw_val_class_addr: |
31679 | inchash::add_rtx (val2->v.val_addr, hstate); |
31680 | break; |
31681 | default: |
31682 | gcc_unreachable (); |
31683 | } |
31684 | break; |
31685 | case DW_OP_bregx: |
31686 | case DW_OP_bit_piece: |
31687 | hstate.add_object (obj&: val1->v.val_int); |
31688 | hstate.add_object (obj&: val2->v.val_int); |
31689 | break; |
31690 | case DW_OP_addr: |
31691 | hash_addr: |
31692 | if (loc->dtprel) |
31693 | { |
31694 | unsigned char dtprel = 0xd1; |
31695 | hstate.add_object (obj&: dtprel); |
31696 | } |
31697 | inchash::add_rtx (val1->v.val_addr, hstate); |
31698 | break; |
31699 | case DW_OP_GNU_addr_index: |
31700 | case DW_OP_addrx: |
31701 | case DW_OP_GNU_const_index: |
31702 | case DW_OP_constx: |
31703 | { |
31704 | if (loc->dtprel) |
31705 | { |
31706 | unsigned char dtprel = 0xd1; |
31707 | hstate.add_object (obj&: dtprel); |
31708 | } |
31709 | inchash::add_rtx (val1->val_entry->addr.rtl, hstate); |
31710 | } |
31711 | break; |
31712 | case DW_OP_implicit_pointer: |
31713 | case DW_OP_GNU_implicit_pointer: |
31714 | hstate.add_int (v: val2->v.val_int); |
31715 | break; |
31716 | case DW_OP_entry_value: |
31717 | case DW_OP_GNU_entry_value: |
31718 | hstate.add_object (obj&: val1->v.val_loc); |
31719 | break; |
31720 | case DW_OP_regval_type: |
31721 | case DW_OP_deref_type: |
31722 | case DW_OP_GNU_regval_type: |
31723 | case DW_OP_GNU_deref_type: |
31724 | { |
31725 | unsigned int byte_size |
31726 | = get_AT_unsigned (die: val2->v.val_die_ref.die, attr_kind: DW_AT_byte_size); |
31727 | unsigned int encoding |
31728 | = get_AT_unsigned (die: val2->v.val_die_ref.die, attr_kind: DW_AT_encoding); |
31729 | hstate.add_object (obj&: val1->v.val_int); |
31730 | hstate.add_object (obj&: byte_size); |
31731 | hstate.add_object (obj&: encoding); |
31732 | } |
31733 | break; |
31734 | case DW_OP_convert: |
31735 | case DW_OP_reinterpret: |
31736 | case DW_OP_GNU_convert: |
31737 | case DW_OP_GNU_reinterpret: |
31738 | if (val1->val_class == dw_val_class_unsigned_const) |
31739 | { |
31740 | hstate.add_object (obj&: val1->v.val_unsigned); |
31741 | break; |
31742 | } |
31743 | /* FALLTHRU */ |
31744 | case DW_OP_const_type: |
31745 | case DW_OP_GNU_const_type: |
31746 | { |
31747 | unsigned int byte_size |
31748 | = get_AT_unsigned (die: val1->v.val_die_ref.die, attr_kind: DW_AT_byte_size); |
31749 | unsigned int encoding |
31750 | = get_AT_unsigned (die: val1->v.val_die_ref.die, attr_kind: DW_AT_encoding); |
31751 | hstate.add_object (obj&: byte_size); |
31752 | hstate.add_object (obj&: encoding); |
31753 | if (loc->dw_loc_opc != DW_OP_const_type |
31754 | && loc->dw_loc_opc != DW_OP_GNU_const_type) |
31755 | break; |
31756 | hstate.add_object (obj&: val2->val_class); |
31757 | switch (val2->val_class) |
31758 | { |
31759 | case dw_val_class_const: |
31760 | hstate.add_object (obj&: val2->v.val_int); |
31761 | break; |
31762 | case dw_val_class_vec: |
31763 | { |
31764 | unsigned int elt_size = val2->v.val_vec.elt_size; |
31765 | unsigned int len = val2->v.val_vec.length; |
31766 | |
31767 | hstate.add_object (obj&: elt_size); |
31768 | hstate.add_object (obj&: len); |
31769 | hstate.add (data: val2->v.val_vec.array, len: len * elt_size); |
31770 | } |
31771 | break; |
31772 | case dw_val_class_const_double: |
31773 | hstate.add_object (obj&: val2->v.val_double.low); |
31774 | hstate.add_object (obj&: val2->v.val_double.high); |
31775 | break; |
31776 | case dw_val_class_wide_int: |
31777 | hstate.add (data: val2->v.val_wide->get_val (), |
31778 | len: get_full_len (op: *val2->v.val_wide) |
31779 | * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR); |
31780 | break; |
31781 | default: |
31782 | gcc_unreachable (); |
31783 | } |
31784 | } |
31785 | break; |
31786 | |
31787 | default: |
31788 | /* Other codes have no operands. */ |
31789 | break; |
31790 | } |
31791 | } |
31792 | |
31793 | /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */ |
31794 | |
31795 | static inline void |
31796 | hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate) |
31797 | { |
31798 | dw_loc_descr_ref l; |
31799 | bool sizes_computed = false; |
31800 | /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */ |
31801 | size_of_locs (loc); |
31802 | |
31803 | for (l = loc; l != NULL; l = l->dw_loc_next) |
31804 | { |
31805 | enum dwarf_location_atom opc = l->dw_loc_opc; |
31806 | hstate.add_object (obj&: opc); |
31807 | if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed) |
31808 | { |
31809 | size_of_locs (loc); |
31810 | sizes_computed = true; |
31811 | } |
31812 | hash_loc_operands (loc: l, hstate); |
31813 | } |
31814 | } |
31815 | |
31816 | /* Compute hash of the whole location list LIST_HEAD. */ |
31817 | |
31818 | static inline void |
31819 | hash_loc_list (dw_loc_list_ref list_head) |
31820 | { |
31821 | dw_loc_list_ref curr = list_head; |
31822 | inchash::hash hstate; |
31823 | |
31824 | for (curr = list_head; curr != NULL; curr = curr->dw_loc_next) |
31825 | { |
31826 | hstate.add (data: curr->begin, len: strlen (s: curr->begin) + 1); |
31827 | hstate.add (data: curr->end, len: strlen (s: curr->end) + 1); |
31828 | hstate.add_object (obj&: curr->vbegin); |
31829 | hstate.add_object (obj&: curr->vend); |
31830 | if (curr->section) |
31831 | hstate.add (data: curr->section, len: strlen (s: curr->section) + 1); |
31832 | hash_locs (loc: curr->expr, hstate); |
31833 | } |
31834 | list_head->hash = hstate.end (); |
31835 | } |
31836 | |
31837 | /* Return true if X and Y opcodes have the same operands. */ |
31838 | |
static inline bool
compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
{
  /* The caller (compare_locs) has already verified that X and Y use the
     same opcode, so only the operands need comparing here.  */
  dw_val_ref valx1 = &x->dw_loc_oprnd1;
  dw_val_ref valx2 = &x->dw_loc_oprnd2;
  dw_val_ref valy1 = &y->dw_loc_oprnd1;
  dw_val_ref valy2 = &y->dw_loc_oprnd2;

  switch (x->dw_loc_opc)
    {
    case DW_OP_const4u:
    case DW_OP_const8u:
      /* Address-sized constants marked DTP-relative (TLS) are compared
	 as addresses rather than as plain integers.  */
      if (x->dtprel)
	goto hash_addr;
      /* FALLTHRU */
    case DW_OP_const1u:
    case DW_OP_const1s:
    case DW_OP_const2u:
    case DW_OP_const2s:
    case DW_OP_const4s:
    case DW_OP_const8s:
    case DW_OP_constu:
    case DW_OP_consts:
    case DW_OP_pick:
    case DW_OP_plus_uconst:
    case DW_OP_breg0:
    case DW_OP_breg1:
    case DW_OP_breg2:
    case DW_OP_breg3:
    case DW_OP_breg4:
    case DW_OP_breg5:
    case DW_OP_breg6:
    case DW_OP_breg7:
    case DW_OP_breg8:
    case DW_OP_breg9:
    case DW_OP_breg10:
    case DW_OP_breg11:
    case DW_OP_breg12:
    case DW_OP_breg13:
    case DW_OP_breg14:
    case DW_OP_breg15:
    case DW_OP_breg16:
    case DW_OP_breg17:
    case DW_OP_breg18:
    case DW_OP_breg19:
    case DW_OP_breg20:
    case DW_OP_breg21:
    case DW_OP_breg22:
    case DW_OP_breg23:
    case DW_OP_breg24:
    case DW_OP_breg25:
    case DW_OP_breg26:
    case DW_OP_breg27:
    case DW_OP_breg28:
    case DW_OP_breg29:
    case DW_OP_breg30:
    case DW_OP_breg31:
    case DW_OP_regx:
    case DW_OP_fbreg:
    case DW_OP_piece:
    case DW_OP_deref_size:
    case DW_OP_xderef_size:
      /* All of the above carry a single integer operand.  */
      return valx1->v.val_int == valy1->v.val_int;
    case DW_OP_skip:
    case DW_OP_bra:
      /* If splitting debug info, the use of DW_OP_GNU_addr_index
	 can cause irrelevant differences in dw_loc_addr.  */
      gcc_assert (valx1->val_class == dw_val_class_loc
		  && valy1->val_class == dw_val_class_loc
		  && (dwarf_split_debug_info
		      || x->dw_loc_addr == y->dw_loc_addr));
      /* Branches are equal when they target the same offset.  */
      return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
    case DW_OP_implicit_value:
      /* First operand is the value's length; second is the value itself,
	 whose representation depends on its value class.  */
      if (valx1->v.val_unsigned != valy1->v.val_unsigned
	  || valx2->val_class != valy2->val_class)
	return false;
      switch (valx2->val_class)
	{
	case dw_val_class_const:
	  return valx2->v.val_int == valy2->v.val_int;
	case dw_val_class_vec:
	  return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
		 && valx2->v.val_vec.length == valy2->v.val_vec.length
		 && memcmp (s1: valx2->v.val_vec.array, s2: valy2->v.val_vec.array,
			    n: valx2->v.val_vec.elt_size
			    * valx2->v.val_vec.length) == 0;
	case dw_val_class_const_double:
	  return valx2->v.val_double.low == valy2->v.val_double.low
		 && valx2->v.val_double.high == valy2->v.val_double.high;
	case dw_val_class_wide_int:
	  return *valx2->v.val_wide == *valy2->v.val_wide;
	case dw_val_class_addr:
	  return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
	default:
	  gcc_unreachable ();
	}
    case DW_OP_bregx:
    case DW_OP_bit_piece:
      /* Two integer operands (register+offset, resp. size+offset).  */
      return valx1->v.val_int == valy1->v.val_int
	     && valx2->v.val_int == valy2->v.val_int;
    case DW_OP_addr:
    hash_addr:
      return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
    case DW_OP_GNU_addr_index:
    case DW_OP_addrx:
    case DW_OP_GNU_const_index:
    case DW_OP_constx:
      {
	/* Indexed addresses compare by the underlying RTL of the
	   address table entry, not by the index itself.  */
	rtx ax1 = valx1->val_entry->addr.rtl;
	rtx ay1 = valy1->val_entry->addr.rtl;
	return rtx_equal_p (ax1, ay1);
      }
    case DW_OP_implicit_pointer:
    case DW_OP_GNU_implicit_pointer:
      /* Operands are a referenced DIE and a byte offset into it.  */
      return valx1->val_class == dw_val_class_die_ref
	     && valx1->val_class == valy1->val_class
	     && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
	     && valx2->v.val_int == valy2->v.val_int;
    case DW_OP_entry_value:
    case DW_OP_GNU_entry_value:
      /* Recurse into the nested location expression.  */
      return compare_loc_operands (x: valx1->v.val_loc, y: valy1->v.val_loc);
    case DW_OP_const_type:
    case DW_OP_GNU_const_type:
      /* Operands are a base-type DIE and a typed constant value.  */
      if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
	  || valx2->val_class != valy2->val_class)
	return false;
      switch (valx2->val_class)
	{
	case dw_val_class_const:
	  return valx2->v.val_int == valy2->v.val_int;
	case dw_val_class_vec:
	  return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
		 && valx2->v.val_vec.length == valy2->v.val_vec.length
		 && memcmp (s1: valx2->v.val_vec.array, s2: valy2->v.val_vec.array,
			    n: valx2->v.val_vec.elt_size
			    * valx2->v.val_vec.length) == 0;
	case dw_val_class_const_double:
	  return valx2->v.val_double.low == valy2->v.val_double.low
		 && valx2->v.val_double.high == valy2->v.val_double.high;
	case dw_val_class_wide_int:
	  return *valx2->v.val_wide == *valy2->v.val_wide;
	default:
	  gcc_unreachable ();
	}
    case DW_OP_regval_type:
    case DW_OP_deref_type:
    case DW_OP_GNU_regval_type:
    case DW_OP_GNU_deref_type:
      /* An integer (register number resp. deref size) plus a type DIE.  */
      return valx1->v.val_int == valy1->v.val_int
	     && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
    case DW_OP_convert:
    case DW_OP_reinterpret:
    case DW_OP_GNU_convert:
    case DW_OP_GNU_reinterpret:
      /* The operand is either a raw unsigned constant or a type DIE.  */
      if (valx1->val_class != valy1->val_class)
	return false;
      if (valx1->val_class == dw_val_class_unsigned_const)
	return valx1->v.val_unsigned == valy1->v.val_unsigned;
      return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
    case DW_OP_GNU_parameter_ref:
      return valx1->val_class == dw_val_class_die_ref
	     && valx1->val_class == valy1->val_class
	     && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
    default:
      /* Other codes have no operands.  */
      return true;
    }
}
32007 | |
32008 | /* Return true if DWARF location expressions X and Y are the same. */ |
32009 | |
32010 | static inline bool |
32011 | compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y) |
32012 | { |
32013 | for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next) |
32014 | if (x->dw_loc_opc != y->dw_loc_opc |
32015 | || x->dtprel != y->dtprel |
32016 | || !compare_loc_operands (x, y)) |
32017 | break; |
32018 | return x == NULL && y == NULL; |
32019 | } |
32020 | |
32021 | /* Hashtable helpers. */ |
32022 | |
struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
{
  /* Return the hash value precomputed by hash_loc_list.  */
  static inline hashval_t hash (const dw_loc_list_struct *);
  /* Deep structural equality of two location lists.  */
  static inline bool equal (const dw_loc_list_struct *,
			    const dw_loc_list_struct *);
};
32029 | |
32030 | /* Return precomputed hash of location list X. */ |
32031 | |
inline hashval_t
loc_list_hasher::hash (const dw_loc_list_struct *x)
{
  /* hash_loc_list must have been called on X beforehand to fill in
     X->hash; just return the cached value here.  */
  return x->hash;
}
32037 | |
32038 | /* Return true if location lists A and B are the same. */ |
32039 | |
32040 | inline bool |
32041 | loc_list_hasher::equal (const dw_loc_list_struct *a, |
32042 | const dw_loc_list_struct *b) |
32043 | { |
32044 | if (a == b) |
32045 | return true; |
32046 | if (a->hash != b->hash) |
32047 | return false; |
32048 | for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next) |
32049 | if (strcmp (s1: a->begin, s2: b->begin) != 0 |
32050 | || strcmp (s1: a->end, s2: b->end) != 0 |
32051 | || (a->section == NULL) != (b->section == NULL) |
32052 | || (a->section && strcmp (s1: a->section, s2: b->section) != 0) |
32053 | || a->vbegin != b->vbegin || a->vend != b->vend |
32054 | || !compare_locs (x: a->expr, y: b->expr)) |
32055 | break; |
32056 | return a == NULL && b == NULL; |
32057 | } |
32058 | |
32059 | typedef hash_table<loc_list_hasher> loc_list_hash_type; |
32060 | |
32061 | |
32062 | /* Recursively optimize location lists referenced from DIE |
32063 | children and share them whenever possible. */ |
32064 | |
static void
optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
{
  dw_die_ref c;
  dw_attr_node *a;
  unsigned ix;
  dw_loc_list_struct **slot;
  /* Set when a location list on this DIE loses its view-list symbol, in
     which case the DIE's DW_AT_GNU_locviews attribute (if any) must be
     removed as well.  */
  bool drop_locviews = false;
  bool has_locviews = false;

  FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
    if (AT_class (a) == dw_val_class_loc_list)
      {
	dw_loc_list_ref list = AT_loc_list (a);
	/* TODO: perform some optimizations here, before hashing
	   it and storing into the hash table.  */
	hash_loc_list (list_head: list);
	slot = htab->find_slot_with_hash (comparable: list, hash: list->hash, insert: INSERT);
	if (*slot == NULL)
	  {
	    /* First occurrence: LIST becomes the canonical copy.  */
	    *slot = list;
	    if (loc_list_has_views (list))
	      gcc_assert (list->vl_symbol);
	    else if (list->vl_symbol)
	      {
		/* A view-list symbol without actual views is useless;
		   drop it, and the locviews attribute with it.  */
		drop_locviews = true;
		list->vl_symbol = NULL;
	      }
	  }
	else
	  {
	    /* Duplicate: share the canonical copy instead.  If the
	       canonical copy has no view-list symbol but ours did, the
	       locviews attribute must be dropped too.  */
	    if (list->vl_symbol && !(*slot)->vl_symbol)
	      drop_locviews = true;
	    a->dw_attr_val.v.val_loc_list = *slot;
	  }
      }
    else if (AT_class (a) == dw_val_class_view_list)
      {
	gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
	has_locviews = true;
      }


  if (drop_locviews && has_locviews)
    remove_AT (die, attr_kind: DW_AT_GNU_locviews);

  FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
}
32113 | |
32114 | |
32115 | /* Recursively assign each location list a unique index into the debug_addr |
32116 | section. */ |
32117 | |
32118 | static void |
32119 | index_location_lists (dw_die_ref die) |
32120 | { |
32121 | dw_die_ref c; |
32122 | dw_attr_node *a; |
32123 | unsigned ix; |
32124 | |
32125 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
32126 | if (AT_class (a) == dw_val_class_loc_list) |
32127 | { |
32128 | dw_loc_list_ref list = AT_loc_list (a); |
32129 | dw_loc_list_ref curr; |
32130 | for (curr = list; curr != NULL; curr = curr->dw_loc_next) |
32131 | { |
32132 | /* Don't index an entry that has already been indexed |
32133 | or won't be output. Make sure skip_loc_list_entry doesn't |
32134 | call size_of_locs, because that might cause circular dependency, |
32135 | index_location_lists requiring address table indexes to be |
32136 | computed, but adding new indexes through add_addr_table_entry |
32137 | and address table index computation requiring no new additions |
32138 | to the hash table. In the rare case of DWARF[234] >= 64KB |
32139 | location expression, we'll just waste unused address table entry |
32140 | for it. */ |
32141 | if (curr->begin_entry != NULL || skip_loc_list_entry (curr)) |
32142 | continue; |
32143 | |
32144 | curr->begin_entry |
32145 | = add_addr_table_entry (addr: xstrdup (curr->begin), kind: ate_kind_label); |
32146 | if (dwarf_version >= 5 && !HAVE_AS_LEB128) |
32147 | curr->end_entry |
32148 | = add_addr_table_entry (addr: xstrdup (curr->end), kind: ate_kind_label); |
32149 | } |
32150 | } |
32151 | |
32152 | FOR_EACH_CHILD (die, c, index_location_lists (c)); |
32153 | } |
32154 | |
/* Optimize location lists referenced from DIE
   children and share them whenever possible.  */

static void
optimize_location_lists (dw_die_ref die)
{
  /* Table of canonical location lists, keyed by the hash computed in
     hash_loc_list, shared across the whole recursive walk.  */
  loc_list_hash_type htab (500);
  optimize_location_lists_1 (die, htab: &htab);
}
32164 | |
32165 | /* Traverse the limbo die list, and add parent/child links. The only |
32166 | dies without parents that should be here are concrete instances of |
32167 | inline functions, and the comp_unit_die. We can ignore the comp_unit_die. |
32168 | For concrete instances, we can get the parent die from the abstract |
32169 | instance. */ |
32170 | |
static void
flush_limbo_die_list (void)
{
  limbo_die_node *node;

  /* get_context_die calls force_decl_die, which can put new DIEs on the
     limbo list in LTO mode when nested functions are put in a different
     partition than that of their parent function.  */
  while ((node = limbo_die_list))
    {
      dw_die_ref die = node->die;
      /* Pop NODE before processing it, since the handling below can
	 push new nodes onto the list.  */
      limbo_die_list = node->next;

      if (die->die_parent == NULL)
	{
	  dw_die_ref origin = get_AT_ref (die, attr_kind: DW_AT_abstract_origin);

	  if (origin && origin->die_parent)
	    /* Concrete instance: attach it under its abstract origin's
	       parent.  */
	    add_child_die (die: origin->die_parent, child_die: die);
	  else if (is_cu_die (c: die))
	    /* The compilation unit DIE needs no parent.  */
	    ;
	  else if (seen_error ())
	    /* It's OK to be confused by errors in the input.  */
	    add_child_die (die: comp_unit_die (), child_die: die);
	  else
	    {
	      /* In certain situations, the lexical block containing a
		 nested function can be optimized away, which results
		 in the nested function die being orphaned.  Likewise
		 with the return type of that nested function.  Force
		 this to be a child of the containing function.

		 It may happen that even the containing function got fully
		 inlined and optimized out.  In that case we are lost and
		 assign the empty child.  This should not be big issue as
		 the function is likely unreachable too.  */
	      gcc_assert (node->created_for);

	      if (DECL_P (node->created_for))
		origin = get_context_die (DECL_CONTEXT (node->created_for));
	      else if (TYPE_P (node->created_for))
		origin = scope_die_for (t: node->created_for, context_die: comp_unit_die ());
	      else
		origin = comp_unit_die ();

	      add_child_die (die: origin, child_die: die);
	    }
	}
    }
}
32221 | |
32222 | /* Reset DIEs so we can output them again. */ |
32223 | |
32224 | static void |
32225 | reset_dies (dw_die_ref die) |
32226 | { |
32227 | dw_die_ref c; |
32228 | |
32229 | /* Remove stuff we re-generate. */ |
32230 | die->die_mark = 0; |
32231 | die->die_offset = 0; |
32232 | die->die_abbrev = 0; |
32233 | remove_AT (die, attr_kind: DW_AT_sibling); |
32234 | |
32235 | FOR_EACH_CHILD (die, c, reset_dies (c)); |
32236 | } |
32237 | |
32238 | /* reset_indirect_string removed the references coming from DW_AT_name |
32239 | and DW_AT_comp_dir attributes on compilation unit DIEs. Readd them as |
32240 | .debug_line_str strings again. */ |
32241 | |
static void
adjust_name_comp_dir (dw_die_ref die)
{
  /* Iteration 0 handles DW_AT_name, iteration 1 DW_AT_comp_dir.  */
  for (int i = 0; i < 2; i++)
    {
      dwarf_attribute attr_kind = i ? DW_AT_comp_dir : DW_AT_name;
      dw_attr_node *a = get_AT (die, attr_kind);
      /* Nothing to do if the attribute is absent or not a string.  */
      if (a == NULL || a->dw_attr_val.val_class != dw_val_class_str)
	continue;

      /* Lazily create the .debug_line_str string table.  */
      if (!debug_line_str_hash)
	debug_line_str_hash
	  = hash_table<indirect_string_hasher>::create_ggc (n: 10);

      /* Re-intern the string into the .debug_line_str table and make
	 the attribute reference it via DW_FORM_line_strp.  */
      struct indirect_string_node *node
	= find_AT_string_in_table (str: a->dw_attr_val.v.val_str->str,
				   table: debug_line_str_hash);
      set_indirect_string (node);
      node->form = DW_FORM_line_strp;
      a->dw_attr_val.v.val_str = node;
    }
}
32264 | |
32265 | /* Output stuff that dwarf requires at the end of every file, |
32266 | and generate the DWARF-2 debugging info. */ |
32267 | |
32268 | static void |
32269 | dwarf2out_finish (const char *filename) |
32270 | { |
32271 | comdat_type_node *ctnode; |
32272 | dw_die_ref main_comp_unit_die; |
32273 | unsigned char checksum[16]; |
32274 | char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES]; |
32275 | |
32276 | /* Generate CTF/BTF debug info. */ |
32277 | if ((ctf_debug_info_level > CTFINFO_LEVEL_NONE |
32278 | || btf_debuginfo_p ()) && lang_GNU_C ()) |
32279 | ctf_debug_finish (filename); |
32280 | |
32281 | /* Skip emitting DWARF if not required. */ |
32282 | if (!dwarf_debuginfo_p ()) |
32283 | return; |
32284 | |
32285 | /* Flush out any latecomers to the limbo party. */ |
32286 | flush_limbo_die_list (); |
32287 | |
32288 | if (inline_entry_data_table) |
32289 | gcc_assert (inline_entry_data_table->is_empty ()); |
32290 | |
32291 | if (flag_checking) |
32292 | { |
32293 | verify_die (die: comp_unit_die ()); |
32294 | for (limbo_die_node *node = cu_die_list; node; node = node->next) |
32295 | verify_die (die: node->die); |
32296 | } |
32297 | |
32298 | /* We shouldn't have any symbols with delayed asm names for |
32299 | DIEs generated after early finish. */ |
32300 | gcc_assert (deferred_asm_name == NULL); |
32301 | |
32302 | gen_remaining_tmpl_value_param_die_attribute (); |
32303 | |
32304 | if (flag_generate_lto || flag_generate_offload) |
32305 | { |
32306 | gcc_assert (flag_fat_lto_objects || flag_generate_offload); |
32307 | |
32308 | /* Prune stuff so that dwarf2out_finish runs successfully |
32309 | for the fat part of the object. */ |
32310 | reset_dies (die: comp_unit_die ()); |
32311 | for (limbo_die_node *node = cu_die_list; node; node = node->next) |
32312 | reset_dies (die: node->die); |
32313 | for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next) |
32314 | { |
32315 | /* Remove the pointer to the line table. */ |
32316 | remove_AT (die: ctnode->root_die, attr_kind: DW_AT_stmt_list); |
32317 | if (debug_info_level >= DINFO_LEVEL_TERSE) |
32318 | reset_dies (die: ctnode->root_die); |
32319 | } |
32320 | |
32321 | /* Reset die CU symbol so we don't output it twice. */ |
32322 | comp_unit_die ()->die_id.die_symbol = NULL; |
32323 | |
32324 | /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */ |
32325 | remove_AT (die: comp_unit_die (), attr_kind: DW_AT_stmt_list); |
32326 | if (have_macinfo) |
32327 | remove_AT (die: comp_unit_die (), DEBUG_MACRO_ATTRIBUTE); |
32328 | |
32329 | /* Remove indirect string decisions. */ |
32330 | debug_str_hash->traverse<void *, reset_indirect_string> (NULL); |
32331 | if (debug_line_str_hash) |
32332 | { |
32333 | debug_line_str_hash->traverse<void *, reset_indirect_string> (NULL); |
32334 | debug_line_str_hash = NULL; |
32335 | if (asm_outputs_debug_line_str ()) |
32336 | { |
32337 | adjust_name_comp_dir (die: comp_unit_die ()); |
32338 | for (limbo_die_node *node = cu_die_list; node; node = node->next) |
32339 | adjust_name_comp_dir (die: node->die); |
32340 | } |
32341 | } |
32342 | } |
32343 | |
32344 | #if ENABLE_ASSERT_CHECKING |
32345 | { |
32346 | dw_die_ref die = comp_unit_die (), c; |
32347 | FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark)); |
32348 | } |
32349 | #endif |
32350 | base_types.truncate (size: 0); |
32351 | for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next) |
32352 | resolve_addr (die: ctnode->root_die); |
32353 | resolve_addr (die: comp_unit_die ()); |
32354 | move_marked_base_types (); |
32355 | |
32356 | if (dump_file) |
32357 | { |
32358 | fprintf (stream: dump_file, format: "DWARF for %s\n" , filename); |
32359 | print_die (die: comp_unit_die (), outfile: dump_file); |
32360 | } |
32361 | |
32362 | /* Initialize sections and labels used for actual assembler output. */ |
32363 | unsigned generation = init_sections_and_labels (early_lto_debug: false); |
32364 | |
32365 | /* Traverse the DIE's and add sibling attributes to those DIE's that |
32366 | have children. */ |
32367 | add_sibling_attributes (die: comp_unit_die ()); |
32368 | limbo_die_node *node; |
32369 | for (node = cu_die_list; node; node = node->next) |
32370 | add_sibling_attributes (die: node->die); |
32371 | for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next) |
32372 | add_sibling_attributes (die: ctnode->root_die); |
32373 | |
32374 | /* When splitting DWARF info, we put some attributes in the |
32375 | skeleton compile_unit DIE that remains in the .o, while |
32376 | most attributes go in the DWO compile_unit_die. */ |
32377 | if (dwarf_split_debug_info) |
32378 | { |
32379 | limbo_die_node *cu; |
32380 | main_comp_unit_die = gen_compile_unit_die (NULL); |
32381 | if (dwarf_version >= 5) |
32382 | main_comp_unit_die->die_tag = DW_TAG_skeleton_unit; |
32383 | cu = limbo_die_list; |
32384 | gcc_assert (cu->die == main_comp_unit_die); |
32385 | limbo_die_list = limbo_die_list->next; |
32386 | cu->next = cu_die_list; |
32387 | cu_die_list = cu; |
32388 | } |
32389 | else |
32390 | main_comp_unit_die = comp_unit_die (); |
32391 | |
32392 | /* Output a terminator label for the .text section. */ |
32393 | switch_to_section (text_section); |
32394 | targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0); |
32395 | if (cold_text_section) |
32396 | { |
32397 | switch_to_section (cold_text_section); |
32398 | targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0); |
32399 | } |
32400 | |
32401 | /* We can only use the low/high_pc attributes if all of the code was |
32402 | in .text. */ |
32403 | if ((!have_multiple_function_sections |
32404 | && vec_safe_length (v: switch_text_ranges) < 2) |
32405 | || (dwarf_version < 3 && dwarf_strict)) |
32406 | { |
32407 | const char *end_label = text_end_label; |
32408 | if (vec_safe_length (v: switch_text_ranges) == 1) |
32409 | end_label = (*switch_text_ranges)[0]; |
32410 | /* Don't add if the CU has no associated code. */ |
32411 | if (switch_text_ranges) |
32412 | add_AT_low_high_pc (die: main_comp_unit_die, lbl_low: text_section_label, |
32413 | lbl_high: end_label, force_direct: true); |
32414 | } |
32415 | else |
32416 | { |
32417 | unsigned fde_idx; |
32418 | dw_fde_ref fde; |
32419 | bool range_list_added = false; |
32420 | if (switch_text_ranges) |
32421 | { |
32422 | const char *prev_loc = text_section_label; |
32423 | const char *loc; |
32424 | unsigned idx; |
32425 | |
32426 | FOR_EACH_VEC_ELT (*switch_text_ranges, idx, loc) |
32427 | if (prev_loc) |
32428 | { |
32429 | add_ranges_by_labels (die: main_comp_unit_die, begin: prev_loc, |
32430 | end: loc, added: &range_list_added, force_direct: true); |
32431 | prev_loc = NULL; |
32432 | } |
32433 | else |
32434 | prev_loc = loc; |
32435 | |
32436 | if (prev_loc) |
32437 | add_ranges_by_labels (die: main_comp_unit_die, begin: prev_loc, |
32438 | end: text_end_label, added: &range_list_added, force_direct: true); |
32439 | } |
32440 | |
32441 | if (switch_cold_ranges) |
32442 | { |
32443 | const char *prev_loc = cold_text_section_label; |
32444 | const char *loc; |
32445 | unsigned idx; |
32446 | |
32447 | FOR_EACH_VEC_ELT (*switch_cold_ranges, idx, loc) |
32448 | if (prev_loc) |
32449 | { |
32450 | add_ranges_by_labels (die: main_comp_unit_die, begin: prev_loc, |
32451 | end: loc, added: &range_list_added, force_direct: true); |
32452 | prev_loc = NULL; |
32453 | } |
32454 | else |
32455 | prev_loc = loc; |
32456 | |
32457 | if (prev_loc) |
32458 | add_ranges_by_labels (die: main_comp_unit_die, begin: prev_loc, |
32459 | end: cold_end_label, added: &range_list_added, force_direct: true); |
32460 | } |
32461 | |
32462 | FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde) |
32463 | { |
32464 | if (fde->ignored_debug) |
32465 | continue; |
32466 | if (!fde->in_std_section) |
32467 | add_ranges_by_labels (die: main_comp_unit_die, begin: fde->dw_fde_begin, |
32468 | end: fde->dw_fde_end, added: &range_list_added, |
32469 | force_direct: true); |
32470 | if (fde->dw_fde_second_begin && !fde->second_in_std_section) |
32471 | add_ranges_by_labels (die: main_comp_unit_die, begin: fde->dw_fde_second_begin, |
32472 | end: fde->dw_fde_second_end, added: &range_list_added, |
32473 | force_direct: true); |
32474 | } |
32475 | |
32476 | if (range_list_added) |
32477 | { |
32478 | /* We need to give .debug_loc and .debug_ranges an appropriate |
32479 | "base address". Use zero so that these addresses become |
32480 | absolute. Historically, we've emitted the unexpected |
32481 | DW_AT_entry_pc instead of DW_AT_low_pc for this purpose. |
32482 | Emit both to give time for other tools to adapt. */ |
32483 | add_AT_addr (die: main_comp_unit_die, attr_kind: DW_AT_low_pc, const0_rtx, force_direct: true); |
32484 | if (! dwarf_strict && dwarf_version < 4) |
32485 | add_AT_addr (die: main_comp_unit_die, attr_kind: DW_AT_entry_pc, const0_rtx, force_direct: true); |
32486 | |
32487 | add_ranges (NULL); |
32488 | have_multiple_function_sections = true; |
32489 | } |
32490 | } |
32491 | |
32492 | /* AIX Assembler inserts the length, so adjust the reference to match the |
32493 | offset expected by debuggers. */ |
32494 | strcpy (dest: dl_section_ref, src: debug_line_section_label); |
32495 | if (XCOFF_DEBUGGING_INFO) |
32496 | strcat (dest: dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR); |
32497 | |
32498 | if (debug_info_level >= DINFO_LEVEL_TERSE) |
32499 | add_AT_lineptr (die: main_comp_unit_die, attr_kind: DW_AT_stmt_list, |
32500 | label: dl_section_ref); |
32501 | |
32502 | if (have_macinfo) |
32503 | add_AT_macptr (die: comp_unit_die (), DEBUG_MACRO_ATTRIBUTE, |
32504 | label: macinfo_section_label); |
32505 | |
32506 | if (dwarf_split_debug_info) |
32507 | { |
32508 | if (have_location_lists) |
32509 | { |
32510 | /* Since we generate the loclists in the split DWARF .dwo |
32511 | file itself, we don't need to generate a loclists_base |
32512 | attribute for the split compile unit DIE. That attribute |
32513 | (and using relocatable sec_offset FORMs) isn't allowed |
32514 | for a split compile unit. Only if the .debug_loclists |
32515 | section was in the main file, would we need to generate a |
32516 | loclists_base attribute here (for the full or skeleton |
32517 | unit DIE). */ |
32518 | |
32519 | /* optimize_location_lists calculates the size of the lists, |
32520 | so index them first, and assign indices to the entries. |
32521 | Although optimize_location_lists will remove entries from |
32522 | the table, it only does so for duplicates, and therefore |
32523 | only reduces ref_counts to 1. */ |
32524 | index_location_lists (die: comp_unit_die ()); |
32525 | } |
32526 | |
32527 | if (dwarf_version >= 5 && !vec_safe_is_empty (v: ranges_table)) |
32528 | index_rnglists (); |
32529 | |
32530 | if (addr_index_table != NULL) |
32531 | { |
32532 | unsigned int index = 0; |
32533 | addr_index_table |
32534 | ->traverse_noresize<unsigned int *, index_addr_table_entry> |
32535 | (argument: &index); |
32536 | } |
32537 | } |
32538 | |
32539 | loc_list_idx = 0; |
32540 | if (have_location_lists) |
32541 | { |
32542 | optimize_location_lists (die: comp_unit_die ()); |
32543 | /* And finally assign indexes to the entries for -gsplit-dwarf. */ |
32544 | if (dwarf_version >= 5 && dwarf_split_debug_info) |
32545 | assign_location_list_indexes (die: comp_unit_die ()); |
32546 | } |
32547 | |
32548 | save_macinfo_strings (); |
32549 | |
32550 | if (dwarf_split_debug_info) |
32551 | { |
32552 | unsigned int index = 0; |
32553 | |
32554 | /* Add attributes common to skeleton compile_units and |
32555 | type_units. Because these attributes include strings, it |
32556 | must be done before freezing the string table. Top-level |
32557 | skeleton die attrs are added when the skeleton type unit is |
32558 | created, so ensure it is created by this point. */ |
32559 | add_top_level_skeleton_die_attrs (die: main_comp_unit_die); |
32560 | debug_str_hash->traverse_noresize<unsigned int *, index_string> (argument: &index); |
32561 | } |
32562 | |
32563 | /* Output all of the compilation units. We put the main one last so that |
32564 | the offsets are available to output_pubnames. */ |
32565 | for (node = cu_die_list; node; node = node->next) |
32566 | output_comp_unit (die: node->die, output_if_empty: 0, NULL); |
32567 | |
32568 | hash_table<comdat_type_hasher> comdat_type_table (100); |
32569 | for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next) |
32570 | { |
32571 | comdat_type_node **slot = comdat_type_table.find_slot (value: ctnode, insert: INSERT); |
32572 | |
32573 | /* Don't output duplicate types. */ |
32574 | if (*slot != HTAB_EMPTY_ENTRY) |
32575 | continue; |
32576 | |
32577 | /* Add a pointer to the line table for the main compilation unit |
32578 | so that the debugger can make sense of DW_AT_decl_file |
32579 | attributes. */ |
32580 | if (debug_info_level >= DINFO_LEVEL_TERSE) |
32581 | add_AT_lineptr (die: ctnode->root_die, attr_kind: DW_AT_stmt_list, |
32582 | label: (!dwarf_split_debug_info |
32583 | ? dl_section_ref |
32584 | : debug_skeleton_line_section_label)); |
32585 | |
32586 | output_comdat_type_unit (node: ctnode, early_lto_debug: false); |
32587 | *slot = ctnode; |
32588 | } |
32589 | |
32590 | if (dwarf_split_debug_info) |
32591 | { |
32592 | int mark; |
32593 | struct md5_ctx ctx; |
32594 | |
32595 | /* Compute a checksum of the comp_unit to use as the dwo_id. */ |
32596 | md5_init_ctx (ctx: &ctx); |
32597 | mark = 0; |
32598 | die_checksum (die: comp_unit_die (), ctx: &ctx, mark: &mark); |
32599 | unmark_all_dies (die: comp_unit_die ()); |
32600 | md5_finish_ctx (ctx: &ctx, resbuf: checksum); |
32601 | |
32602 | if (dwarf_version < 5) |
32603 | { |
32604 | /* Use the first 8 bytes of the checksum as the dwo_id, |
32605 | and add it to both comp-unit DIEs. */ |
32606 | add_AT_data8 (die: main_comp_unit_die, attr_kind: DW_AT_GNU_dwo_id, data8: checksum); |
32607 | add_AT_data8 (die: comp_unit_die (), attr_kind: DW_AT_GNU_dwo_id, data8: checksum); |
32608 | } |
32609 | |
32610 | /* Add the base offset of the ranges table to the skeleton |
32611 | comp-unit DIE. */ |
32612 | if (!vec_safe_is_empty (v: ranges_table)) |
32613 | { |
32614 | if (dwarf_version < 5) |
32615 | add_AT_lineptr (die: main_comp_unit_die, attr_kind: DW_AT_GNU_ranges_base, |
32616 | label: ranges_section_label); |
32617 | } |
32618 | |
32619 | output_addr_table (); |
32620 | } |
32621 | |
32622 | /* Output the main compilation unit if non-empty or if .debug_macinfo |
32623 | or .debug_macro will be emitted. */ |
32624 | output_comp_unit (die: comp_unit_die (), have_macinfo, |
32625 | dwarf_split_debug_info ? checksum : NULL); |
32626 | |
32627 | if (dwarf_split_debug_info && info_section_emitted) |
32628 | output_skeleton_debug_sections (comp_unit: main_comp_unit_die, dwo_id: checksum); |
32629 | |
32630 | /* Output the abbreviation table. */ |
32631 | if (vec_safe_length (v: abbrev_die_table) != 1) |
32632 | { |
32633 | switch_to_section (debug_abbrev_section); |
32634 | ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label); |
32635 | output_abbrev_section (); |
32636 | } |
32637 | |
32638 | /* Output location list section if necessary. */ |
32639 | if (have_location_lists) |
32640 | { |
32641 | char l1[MAX_ARTIFICIAL_LABEL_BYTES]; |
32642 | char l2[MAX_ARTIFICIAL_LABEL_BYTES]; |
32643 | /* Output the location lists info. */ |
32644 | switch_to_section (debug_loc_section); |
32645 | if (dwarf_version >= 5) |
32646 | { |
32647 | ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 2); |
32648 | ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 3); |
32649 | if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4) |
32650 | dw2_asm_output_data (4, 0xffffffff, |
32651 | "Initial length escape value indicating " |
32652 | "64-bit DWARF extension" ); |
32653 | dw2_asm_output_delta (dwarf_offset_size, l2, l1, |
32654 | "Length of Location Lists" ); |
32655 | ASM_OUTPUT_LABEL (asm_out_file, l1); |
32656 | output_dwarf_version (); |
32657 | dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size" ); |
32658 | dw2_asm_output_data (1, 0, "Segment Size" ); |
32659 | dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0, |
32660 | "Offset Entry Count" ); |
32661 | } |
32662 | ASM_OUTPUT_LABEL (asm_out_file, loc_section_label); |
32663 | if (dwarf_version >= 5 && dwarf_split_debug_info) |
32664 | { |
32665 | unsigned int save_loc_list_idx = loc_list_idx; |
32666 | loc_list_idx = 0; |
32667 | output_loclists_offsets (die: comp_unit_die ()); |
32668 | gcc_assert (save_loc_list_idx == loc_list_idx); |
32669 | } |
32670 | output_location_lists (die: comp_unit_die ()); |
32671 | if (dwarf_version >= 5) |
32672 | ASM_OUTPUT_LABEL (asm_out_file, l2); |
32673 | } |
32674 | |
32675 | output_pubtables (); |
32676 | |
32677 | /* Output the address range information if a CU (.debug_info section) |
32678 | was emitted. We output an empty table even if we had no functions |
32679 | to put in it. This because the consumer has no way to tell the |
32680 | difference between an empty table that we omitted and failure to |
32681 | generate a table that would have contained data. */ |
32682 | if (info_section_emitted) |
32683 | { |
32684 | switch_to_section (debug_aranges_section); |
32685 | output_aranges (); |
32686 | } |
32687 | |
32688 | /* Output ranges section if necessary. */ |
32689 | if (!vec_safe_is_empty (v: ranges_table)) |
32690 | { |
32691 | if (dwarf_version >= 5) |
32692 | { |
32693 | if (dwarf_split_debug_info) |
32694 | { |
32695 | /* We don't know right now whether there are any |
32696 | ranges for .debug_rnglists and any for .debug_rnglists.dwo. |
32697 | Depending on into which of those two belongs the first |
32698 | ranges_table entry, emit that section first and that |
32699 | output_rnglists call will return true if the other kind of |
32700 | ranges needs to be emitted as well. */ |
32701 | bool dwo = (*ranges_table)[0].idx != DW_RANGES_IDX_SKELETON; |
32702 | if (output_rnglists (generation, dwo)) |
32703 | output_rnglists (generation, dwo: !dwo); |
32704 | } |
32705 | else |
32706 | output_rnglists (generation, dwo: false); |
32707 | } |
32708 | else |
32709 | output_ranges (); |
32710 | } |
32711 | |
32712 | /* Have to end the macro section. */ |
32713 | if (have_macinfo) |
32714 | { |
32715 | switch_to_section (debug_macinfo_section); |
32716 | ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label); |
32717 | output_macinfo (debug_line_label: !dwarf_split_debug_info ? debug_line_section_label |
32718 | : debug_skeleton_line_section_label, early_lto_debug: false); |
32719 | dw2_asm_output_data (1, 0, "End compilation unit" ); |
32720 | } |
32721 | |
32722 | /* Output the source line correspondence table. We must do this |
32723 | even if there is no line information. Otherwise, on an empty |
32724 | translation unit, we will generate a present, but empty, |
32725 | .debug_info section. IRIX 6.5 `nm' will then complain when |
32726 | examining the file. This is done late so that any filenames |
32727 | used by the debug_info section are marked as 'used'. */ |
32728 | switch_to_section (debug_line_section); |
32729 | ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label); |
32730 | if (! output_asm_line_debug_info ()) |
32731 | output_line_info (prologue_only: false); |
32732 | |
32733 | if (dwarf_split_debug_info && info_section_emitted) |
32734 | { |
32735 | switch_to_section (debug_skeleton_line_section); |
32736 | ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label); |
32737 | output_line_info (prologue_only: true); |
32738 | } |
32739 | |
32740 | /* If we emitted any indirect strings, output the string table too. */ |
32741 | if (debug_str_hash || skeleton_debug_str_hash) |
32742 | output_indirect_strings (); |
32743 | if (debug_line_str_hash) |
32744 | { |
32745 | switch_to_section (debug_line_str_section); |
32746 | const enum dwarf_form form = DW_FORM_line_strp; |
32747 | debug_line_str_hash->traverse<enum dwarf_form, |
32748 | output_indirect_string> (argument: form); |
32749 | } |
32750 | |
32751 | /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */ |
32752 | symview_upper_bound = 0; |
32753 | if (zero_view_p) |
32754 | bitmap_clear (zero_view_p); |
32755 | } |
32756 | |
32757 | /* Returns a hash value for X (which really is a variable_value_struct). */ |
32758 | |
32759 | inline hashval_t |
32760 | variable_value_hasher::hash (variable_value_struct *x) |
32761 | { |
32762 | return (hashval_t) x->decl_id; |
32763 | } |
32764 | |
32765 | /* Return true if decl_id of variable_value_struct X is the same as |
32766 | UID of decl Y. */ |
32767 | |
32768 | inline bool |
32769 | variable_value_hasher::equal (variable_value_struct *x, tree y) |
32770 | { |
32771 | return x->decl_id == DECL_UID (y); |
32772 | } |
32773 | |
/* Helper function for resolve_variable_value, handle
   DW_OP_GNU_variable_value in one location expression.
   Return true if exprloc has been changed into loclist.  */

static bool
resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
{
  dw_loc_descr_ref next;
  /* Walk the singly linked location expression, remembering the previous
     node so a resolved DW_OP_GNU_variable_value can be replaced or spliced
     out in place.  NEXT is captured up front because the current node may
     be overwritten below.  */
  for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
    {
      next = loc->dw_loc_next;
      /* Only DW_OP_GNU_variable_value ops that still carry an unresolved
	 decl reference need any work.  */
      if (loc->dw_loc_opc != DW_OP_GNU_variable_value
	  || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
	continue;

      /* Only decls belonging to the current function can be resolved
	 here; others are left for a later pass.  */
      tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
      if (DECL_CONTEXT (decl) != current_function_decl)
	continue;

      /* Easiest case: the decl already has a DIE — just turn the decl
	 reference into a DIE reference.  */
      dw_die_ref ref = lookup_decl_die (decl);
      if (ref)
	{
	  loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
	  loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
	  loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
	  continue;
	}
      /* Otherwise try to compute a location list for the decl's value.  */
      dw_loc_list_ref l = loc_list_from_tree (loc: decl, want_address: 0, NULL);
      if (l == NULL)
	continue;
      if (l->dw_loc_next)
	{
	  /* The decl's value needs a full location list; that is only
	     representable if the whole attribute can become a loclist.  */
	  if (AT_class (a) != dw_val_class_loc)
	    continue;
	  switch (a->dw_attr)
	    {
	    /* Following attributes allow both exprloc and loclist
	       classes, so we can change them into a loclist.  */
	    case DW_AT_location:
	    case DW_AT_string_length:
	    case DW_AT_return_addr:
	    case DW_AT_data_member_location:
	    case DW_AT_frame_base:
	    case DW_AT_segment:
	    case DW_AT_static_link:
	    case DW_AT_use_location:
	    case DW_AT_vtable_elem_location:
	      /* Surround each list entry with the prefix of the original
		 expression before LOC and the suffix after it.  */
	      if (prev)
		{
		  prev->dw_loc_next = NULL;
		  prepend_loc_descr_to_each (list: l, ref: AT_loc (a));
		}
	      if (next)
		add_loc_descr_to_each (list: l, ref: next);
	      a->dw_attr_val.val_class = dw_val_class_loc_list;
	      a->dw_attr_val.val_entry = NULL;
	      a->dw_attr_val.v.val_loc_list = l;
	      have_location_lists = true;
	      return true;
	    /* Following attributes allow both exprloc and reference,
	       so if the whole expression is DW_OP_GNU_variable_value alone
	       we could transform it into reference.  */
	    case DW_AT_byte_size:
	    case DW_AT_bit_size:
	    case DW_AT_lower_bound:
	    case DW_AT_upper_bound:
	    case DW_AT_bit_stride:
	    case DW_AT_count:
	    case DW_AT_allocated:
	    case DW_AT_associated:
	    case DW_AT_byte_stride:
	      if (prev == NULL && next == NULL)
		break;
	      /* FALLTHRU */
	    default:
	      /* In strict DWARF leaving the op unresolved is not an
		 option, so give up on this one.  */
	      if (dwarf_strict)
		continue;
	      break;
	    }
	  /* Create DW_TAG_variable that we can refer to.  */
	  gen_decl_die (decl, NULL_TREE, NULL,
			context_die: lookup_decl_die (decl: current_function_decl));
	  ref = lookup_decl_die (decl);
	  if (ref)
	    {
	      loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
	      loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
	      loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
	    }
	  continue;
	}
      /* Single-entry list: splice its expression into this one in place
	 of the DW_OP_GNU_variable_value op.  */
      if (prev)
	{
	  prev->dw_loc_next = l->expr;
	  add_loc_descr (list_head: &prev->dw_loc_next, descr: next);
	  free_loc_descr (loc, NULL);
	  next = prev->dw_loc_next;
	}
      else
	{
	  /* No predecessor: overwrite the head node itself.  */
	  memcpy (dest: loc, src: l->expr, n: sizeof (dw_loc_descr_node));
	  add_loc_descr (list_head: &loc, descr: next);
	  next = loc;
	}
      /* Re-scan from the spliced-in nodes on the next iteration.  */
      loc = prev;
    }
  return false;
}
32882 | |
32883 | /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */ |
32884 | |
32885 | static void |
32886 | resolve_variable_value (dw_die_ref die) |
32887 | { |
32888 | dw_attr_node *a; |
32889 | dw_loc_list_ref loc; |
32890 | unsigned ix; |
32891 | |
32892 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
32893 | switch (AT_class (a)) |
32894 | { |
32895 | case dw_val_class_loc: |
32896 | if (!resolve_variable_value_in_expr (a, loc: AT_loc (a))) |
32897 | break; |
32898 | /* FALLTHRU */ |
32899 | case dw_val_class_loc_list: |
32900 | loc = AT_loc_list (a); |
32901 | gcc_assert (loc); |
32902 | for (; loc; loc = loc->dw_loc_next) |
32903 | resolve_variable_value_in_expr (a, loc: loc->expr); |
32904 | break; |
32905 | default: |
32906 | break; |
32907 | } |
32908 | } |
32909 | |
32910 | /* Attempt to optimize DW_OP_GNU_variable_value refering to |
32911 | temporaries in the current function. */ |
32912 | |
32913 | static void |
32914 | resolve_variable_values (void) |
32915 | { |
32916 | if (!variable_value_hash || !current_function_decl) |
32917 | return; |
32918 | |
32919 | struct variable_value_struct *node |
32920 | = variable_value_hash->find_with_hash (comparable: current_function_decl, |
32921 | DECL_UID (current_function_decl)); |
32922 | |
32923 | if (node == NULL) |
32924 | return; |
32925 | |
32926 | unsigned int i; |
32927 | dw_die_ref die; |
32928 | FOR_EACH_VEC_SAFE_ELT (node->dies, i, die) |
32929 | resolve_variable_value (die); |
32930 | } |
32931 | |
/* Helper function for note_variable_value, handle one location
   expression.  For each DW_OP_GNU_variable_value op whose operand is
   still a decl reference, either resolve it to a DIE reference now, or
   record DIE in a per-function table so resolve_variable_values can
   retry later.  */

static void
note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
{
  for (; loc; loc = loc->dw_loc_next)
    if (loc->dw_loc_opc == DW_OP_GNU_variable_value
	&& loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
      {
	tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
	dw_die_ref ref = lookup_decl_die (decl);
	if (! ref && (flag_generate_lto || flag_generate_offload))
	  {
	    /* ??? This is somewhat a hack because we do not create DIEs
	       for variables not in BLOCK trees early but when generating
	       early LTO output we need the dw_val_class_decl_ref to be
	       fully resolved.  For fat LTO objects we'd also like to
	       undo this after LTO dwarf output.  */
	    gcc_assert (DECL_CONTEXT (decl));
	    dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
	    gcc_assert (ctx != NULL);
	    /* Force creation of a DIE for the decl in its context.  */
	    gen_decl_die (decl, NULL_TREE, NULL, context_die: ctx);
	    ref = lookup_decl_die (decl);
	    gcc_assert (ref != NULL);
	  }
	if (ref)
	  {
	    /* A DIE exists: resolve the decl reference in place.  */
	    loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
	    loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
	    loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
	    continue;
	  }
	/* Otherwise, if the decl is a function-local variable of a
	   function that has a DIE, queue DIE for a later resolution
	   attempt keyed by that function's DECL_UID.  */
	if (VAR_P (decl)
	    && DECL_CONTEXT (decl)
	    && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
	    && lookup_decl_die (DECL_CONTEXT (decl)))
	  {
	    if (!variable_value_hash)
	      variable_value_hash
		= hash_table<variable_value_hasher>::create_ggc (n: 10);

	    tree fndecl = DECL_CONTEXT (decl);
	    struct variable_value_struct *node;
	    struct variable_value_struct **slot
	      = variable_value_hash->find_slot_with_hash (comparable: fndecl,
							  DECL_UID (fndecl),
							  insert: INSERT);
	    if (*slot == NULL)
	      {
		/* First reference into this function: allocate the
		   per-function record.  */
		node = ggc_cleared_alloc<variable_value_struct> ();
		node->decl_id = DECL_UID (fndecl);
		*slot = node;
	      }
	    else
	      node = *slot;

	    vec_safe_push (v&: node->dies, obj: die);
	  }
      }
}
32993 | |
32994 | /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still |
32995 | with dw_val_class_decl_ref operand. */ |
32996 | |
32997 | static void |
32998 | note_variable_value (dw_die_ref die) |
32999 | { |
33000 | dw_die_ref c; |
33001 | dw_attr_node *a; |
33002 | dw_loc_list_ref loc; |
33003 | unsigned ix; |
33004 | |
33005 | FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a) |
33006 | switch (AT_class (a)) |
33007 | { |
33008 | case dw_val_class_loc_list: |
33009 | loc = AT_loc_list (a); |
33010 | gcc_assert (loc); |
33011 | if (!loc->noted_variable_value) |
33012 | { |
33013 | loc->noted_variable_value = 1; |
33014 | for (; loc; loc = loc->dw_loc_next) |
33015 | note_variable_value_in_expr (die, loc: loc->expr); |
33016 | } |
33017 | break; |
33018 | case dw_val_class_loc: |
33019 | note_variable_value_in_expr (die, loc: AT_loc (a)); |
33020 | break; |
33021 | default: |
33022 | break; |
33023 | } |
33024 | |
33025 | /* Mark children. */ |
33026 | FOR_EACH_CHILD (die, c, note_variable_value (c)); |
33027 | } |
33028 | |
33029 | /* Process DWARF dies for CTF generation. */ |
33030 | |
33031 | static void |
33032 | ctf_debug_do_cu (dw_die_ref die) |
33033 | { |
33034 | dw_die_ref c; |
33035 | |
33036 | if (!ctf_do_die (die)) |
33037 | return; |
33038 | |
33039 | FOR_EACH_CHILD (die, c, ctf_do_die (c)); |
33040 | } |
33041 | |
/* Perform any cleanups needed after the early debug generation pass
   has run.  Finalizes the early DIE tree and, when producing LTO
   bytecode, emits the early LTO debug sections.  The emission order
   below mirrors dwarf2out_finish and must not be rearranged.  */

static void
dwarf2out_early_finish (const char *filename)
{
  comdat_type_node *ctnode;
  set_early_dwarf s;
  char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];

  /* PCH might result in DW_AT_producer string being restored from the
     header compilation, so always fill it with empty string initially
     and overwrite only here.  */
  dw_attr_node *producer = get_AT (die: comp_unit_die (), attr_kind: DW_AT_producer);

  if (dwarf_record_gcc_switches)
    producer_string = gen_producer_string (language_string: lang_hooks.name,
					   options: save_decoded_options,
					   options_count: save_decoded_options_count);
  else
    producer_string = concat (lang_hooks.name, " ", version_string, NULL);

  /* Drop the placeholder string's refcount and install the real one.  */
  producer->dw_attr_val.v.val_str->refcount--;
  producer->dw_attr_val.v.val_str = find_AT_string (str: producer_string);

  /* Add the name for the main input file now.  We delayed this from
     dwarf2out_init to avoid complications with PCH.  */
  add_filename_attribute (die: comp_unit_die (), name_string: remap_debug_filename (filename));
  add_comp_dir_attribute (die: comp_unit_die ());

  /* With LTO early dwarf was really finished at compile-time, so make
     sure to adjust the phase after annotating the LTRANS CU DIE.  */
  if (in_lto_p)
    {
      early_dwarf_finished = true;
      if (dump_file)
	{
	  fprintf (stream: dump_file, format: "LTO EARLY DWARF for %s\n", filename);
	  print_die (die: comp_unit_die (), outfile: dump_file);
	}
      return;
    }

  /* Walk through the list of incomplete types again, trying once more to
     emit full debugging info for them.  */
  retry_incomplete_types ();

  gen_scheduled_generic_parms_dies ();
  gen_remaining_tmpl_value_param_die_attribute ();

  /* The point here is to flush out the limbo list so that it is empty
     and we don't need to stream it for LTO.  */
  flush_limbo_die_list ();

  /* Add DW_AT_linkage_name for all deferred DIEs.  */
  for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
    {
      tree decl = node->created_for;
      if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
	  /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
	     ended up in deferred_asm_name before we knew it was
	     constant and never written to disk.  */
	  && DECL_ASSEMBLER_NAME (decl))
	{
	  add_linkage_attr (die: node->die, decl);
	  move_linkage_attr (die: node->die);
	}
    }
  deferred_asm_name = NULL;

  if (flag_eliminate_unused_debug_types)
    prune_unused_types ();

  /* Generate separate COMDAT sections for type DIEs.  */
  if (use_debug_types)
    {
      break_out_comdat_types (die: comp_unit_die ());

      /* Each new type_unit DIE was added to the limbo die list when created.
	 Since these have all been added to comdat_type_list, clear the
	 limbo die list.  */
      limbo_die_list = NULL;

      /* For each new comdat type unit, copy declarations for incomplete
	 types to make the new unit self-contained (i.e., no direct
	 references to the main compile unit).  */
      for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
	copy_decls_for_unworthy_types (unit: ctnode->root_die);
      copy_decls_for_unworthy_types (unit: comp_unit_die ());

      /* In the process of copying declarations from one unit to another,
	 we may have left some declarations behind that are no longer
	 referenced.  Prune them.  */
      prune_unused_types ();
    }

  /* Traverse the DIE's and note DIEs with DW_OP_GNU_variable_value still
     with dw_val_class_decl_ref operand.  */
  note_variable_value (die: comp_unit_die ());
  for (limbo_die_node *node = cu_die_list; node; node = node->next)
    note_variable_value (die: node->die);
  for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
    note_variable_value (die: ctnode->root_die);
  for (limbo_die_node *node = limbo_die_list; node; node = node->next)
    note_variable_value (die: node->die);

  /* The AT_pubnames attribute needs to go in all skeleton dies, including
     both the main_cu and all skeleton TUs.  Making this call unconditional
     would end up either adding a second copy of the AT_pubnames attribute, or
     requiring a special case in add_top_level_skeleton_die_attrs.  */
  if (!dwarf_split_debug_info)
    add_AT_pubnames (die: comp_unit_die ());

  /* The early debug phase is now finished.  */
  early_dwarf_finished = true;
  if (dump_file)
    {
      fprintf (stream: dump_file, format: "EARLY DWARF for %s\n", filename);
      print_die (die: comp_unit_die (), outfile: dump_file);
    }

  /* Generate CTF/BTF debug info.  */
  if ((ctf_debug_info_level > CTFINFO_LEVEL_NONE
       || btf_debuginfo_p ()) && lang_GNU_C ())
    {
      ctf_debug_init ();
      ctf_debug_do_cu (die: comp_unit_die ());
      for (limbo_die_node *node = limbo_die_list; node; node = node->next)
	ctf_debug_do_cu (die: node->die);
      /* Post process the debug data in the CTF container if necessary.  */
      ctf_debug_init_postprocess (btf_debuginfo_p ());

      ctf_debug_early_finish (filename);
    }

  /* Do not generate DWARF assembler now when not producing LTO bytecode.  */
  if ((!flag_generate_lto && !flag_generate_offload)
      /* FIXME: Disable debug info generation for (PE-)COFF targets since the
	 copy_lto_debug_sections operation of the simple object support in
	 libiberty is not implemented for them yet.  */
      || TARGET_PECOFF || TARGET_COFF)
    return;

  /* Now as we are going to output for LTO initialize sections and labels
     to the LTO variants.  We don't need a random-seed postfix as other
     LTO sections as linking the LTO debug sections into one in a partial
     link is fine.  */
  init_sections_and_labels (early_lto_debug: true);

  /* The output below is modeled after dwarf2out_finish with all
     location related output removed and some LTO specific changes.
     Some refactoring might make both smaller and easier to match up.  */

  base_types.truncate (size: 0);
  for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
    mark_base_types (die: ctnode->root_die);
  mark_base_types (die: comp_unit_die ());
  move_marked_base_types ();

  /* Traverse the DIE's and add sibling attributes to those DIE's
     that have children.  */
  add_sibling_attributes (die: comp_unit_die ());
  for (limbo_die_node *node = limbo_die_list; node; node = node->next)
    add_sibling_attributes (die: node->die);
  for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
    add_sibling_attributes (die: ctnode->root_die);

  /* AIX Assembler inserts the length, so adjust the reference to match the
     offset expected by debuggers.  */
  strcpy (dest: dl_section_ref, src: debug_line_section_label);
  if (XCOFF_DEBUGGING_INFO)
    strcat (dest: dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);

  if (debug_info_level >= DINFO_LEVEL_TERSE)
    add_AT_lineptr (die: comp_unit_die (), attr_kind: DW_AT_stmt_list, label: dl_section_ref);

  if (have_macinfo)
    add_AT_macptr (die: comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
		   label: macinfo_section_label);

  save_macinfo_strings ();

  if (dwarf_split_debug_info)
    {
      unsigned int index = 0;
      /* Freeze the string table by assigning indices to all entries.  */
      debug_str_hash->traverse_noresize<unsigned int *, index_string> (argument: &index);
    }

  /* Output all of the compilation units.  We put the main one last so that
     the offsets are available to output_pubnames.  */
  for (limbo_die_node *node = limbo_die_list; node; node = node->next)
    output_comp_unit (die: node->die, output_if_empty: 0, NULL);

  hash_table<comdat_type_hasher> comdat_type_table (100);
  for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
    {
      comdat_type_node **slot = comdat_type_table.find_slot (value: ctnode, insert: INSERT);

      /* Don't output duplicate types.  */
      if (*slot != HTAB_EMPTY_ENTRY)
	continue;

      /* Add a pointer to the line table for the main compilation unit
	 so that the debugger can make sense of DW_AT_decl_file
	 attributes.  */
      if (debug_info_level >= DINFO_LEVEL_TERSE)
	add_AT_lineptr (die: ctnode->root_die, attr_kind: DW_AT_stmt_list,
			label: (!dwarf_split_debug_info
			       ? debug_line_section_label
			       : debug_skeleton_line_section_label));

      output_comdat_type_unit (node: ctnode, early_lto_debug: true);
      *slot = ctnode;
    }

  /* Stick a unique symbol to the main debuginfo section.  */
  compute_comp_unit_symbol (unit_die: comp_unit_die ());

  /* Output the main compilation unit.  We always need it if only for
     the CU symbol.  */
  output_comp_unit (die: comp_unit_die (), output_if_empty: true, NULL);

  /* Output the abbreviation table.  */
  if (vec_safe_length (v: abbrev_die_table) != 1)
    {
      switch_to_section (debug_abbrev_section);
      ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
      output_abbrev_section ();
    }

  /* Have to end the macro section.  */
  if (have_macinfo)
    {
      /* We have to save macinfo state if we need to output it again
	 for the FAT part of the object.  */
      vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
      if (flag_fat_lto_objects)
	macinfo_table = macinfo_table->copy ();

      switch_to_section (debug_macinfo_section);
      ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
      output_macinfo (debug_line_label: debug_line_section_label, early_lto_debug: true);
      dw2_asm_output_data (1, 0, "End compilation unit");

      if (flag_fat_lto_objects)
	{
	  /* output_macinfo consumed the copy; restore the original so
	     the FAT object's dwarf2out_finish can emit it again.  */
	  vec_free (v&: macinfo_table);
	  macinfo_table = saved_macinfo_table;
	}
    }

  /* Emit a skeleton debug_line section.  */
  switch_to_section (debug_line_section);
  ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
  output_line_info (prologue_only: true);

  /* If we emitted any indirect strings, output the string table too.  */
  if (debug_str_hash || skeleton_debug_str_hash)
    output_indirect_strings ();
  if (debug_line_str_hash)
    {
      switch_to_section (debug_line_str_section);
      const enum dwarf_form form = DW_FORM_line_strp;
      debug_line_str_hash->traverse<enum dwarf_form,
				    output_indirect_string> (argument: form);
    }

  /* Switch back to the text section.  */
  switch_to_section (text_section);
}
33312 | |
33313 | /* Reset all state within dwarf2out.cc so that we can rerun the compiler |
33314 | within the same process. For use by toplev::finalize. */ |
33315 | |
void
dwarf2out_cc_finalize (void)
{
  /* Variable-location and call-site tracking state.  */
  last_var_location_insn = NULL;
  cached_next_real_insn = NULL;
  used_rtx_array = NULL;
  incomplete_types = NULL;

  /* Debug section handles.  These are recreated lazily on the next run,
     so it is sufficient to drop the stale pointers; the GC owns the
     underlying storage.  */
  debug_info_section = NULL;
  debug_skeleton_info_section = NULL;
  debug_abbrev_section = NULL;
  debug_skeleton_abbrev_section = NULL;
  debug_aranges_section = NULL;
  debug_addr_section = NULL;
  debug_macinfo_section = NULL;
  debug_line_section = NULL;
  debug_skeleton_line_section = NULL;
  debug_loc_section = NULL;
  debug_pubnames_section = NULL;
  debug_pubtypes_section = NULL;
  debug_str_section = NULL;
  debug_line_str_section = NULL;
  debug_str_dwo_section = NULL;
  debug_str_offsets_section = NULL;
  debug_ranges_section = NULL;
  debug_ranges_dwo_section = NULL;
  debug_frame_section = NULL;

  /* Frame-description and string-table state.  */
  fde_vec = NULL;
  debug_str_hash = NULL;
  debug_line_str_hash = NULL;
  skeleton_debug_str_hash = NULL;
  dw2_string_counter = 0;

  /* Text-section / hot-cold partitioning bookkeeping.  */
  have_multiple_function_sections = false;
  in_text_section_p = false;
  cold_text_section = NULL;
  last_text_label = NULL;
  last_cold_label = NULL;
  switch_text_ranges = NULL;
  switch_cold_ranges = NULL;
  current_unit_personality = NULL;

  /* Early-DWARF (LTO) phase flags.  */
  early_dwarf = false;
  early_dwarf_finished = false;

  /* DIE construction and lookup tables.  */
  next_die_offset = 0;
  single_comp_unit_die = NULL;
  comdat_type_list = NULL;
  limbo_die_list = NULL;
  file_table = NULL;
  decl_die_table = NULL;
  common_block_die_table = NULL;
  decl_loc_table = NULL;
  call_arg_locations = NULL;
  call_arg_loc_last = NULL;
  call_site_count = -1;
  tail_call_site_count = -1;
  cached_dw_loc_list_table = NULL;
  abbrev_die_table = NULL;
  /* This map is heap-allocated with 'new' rather than GC-managed, so it
     must be freed explicitly before the pointer is cleared.  */
  delete dwarf_proc_stack_usage_map;
  dwarf_proc_stack_usage_map = NULL;

  /* Line-number table state.  */
  line_info_label_num = 0;
  cur_line_info_table = NULL;
  text_section_line_info = NULL;
  cold_text_section_line_info = NULL;
  separate_line_info = NULL;

  /* Miscellaneous per-compilation-unit tables and counters.  */
  info_section_emitted = false;
  pubname_table = NULL;
  pubtype_table = NULL;
  macinfo_table = NULL;
  ranges_table = NULL;
  ranges_by_label = NULL;
  rnglist_idx = 0;
  have_location_lists = false;
  loclabel_num = 0;
  poc_label_num = 0;
  last_emitted_file = NULL;
  label_num = 0;
  tmpl_value_parm_die_table = NULL;
  generic_type_instances = NULL;
  frame_pointer_fb_offset = 0;
  frame_pointer_fb_offset_valid = false;
  /* base_types is a non-GC vec; release its heap storage in place.  */
  base_types.release ();
  /* producer_string was allocated with XNEWVEC; free it explicitly.  */
  XDELETEVEC (producer_string);
  producer_string = NULL;
  output_line_info_generation = 0;
  init_sections_and_labels_generation = 0;
}
33402 | |
33403 | #include "gt-dwarf2out.h" |
33404 | |