1 | /* Output variables, constants and external declarations, for GNU compiler. |
2 | Copyright (C) 1987-2024 Free Software Foundation, Inc. |
3 | |
4 | This file is part of GCC. |
5 | |
6 | GCC is free software; you can redistribute it and/or modify it under |
7 | the terms of the GNU General Public License as published by the Free |
8 | Software Foundation; either version 3, or (at your option) any later |
9 | version. |
10 | |
11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
14 | for more details. |
15 | |
16 | You should have received a copy of the GNU General Public License |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ |
19 | |
20 | |
21 | /* This file handles generation of all the assembler code |
22 | *except* the instructions of a function. |
23 | This includes declarations of variables and their initial values. |
24 | |
25 | We also output the assembler code for constants stored in memory |
26 | and are responsible for combining constants with the same value. */ |
27 | |
28 | #include "config.h" |
29 | #include "system.h" |
30 | #include "coretypes.h" |
31 | #include "backend.h" |
32 | #include "target.h" |
33 | #include "rtl.h" |
34 | #include "tree.h" |
35 | #include "predict.h" |
36 | #include "memmodel.h" |
37 | #include "tm_p.h" |
38 | #include "stringpool.h" |
39 | #include "regs.h" |
40 | #include "emit-rtl.h" |
41 | #include "cgraph.h" |
42 | #include "diagnostic-core.h" |
43 | #include "fold-const.h" |
44 | #include "stor-layout.h" |
45 | #include "varasm.h" |
46 | #include "version.h" |
47 | #include "flags.h" |
48 | #include "stmt.h" |
49 | #include "expr.h" |
50 | #include "expmed.h" |
51 | #include "optabs.h" |
52 | #include "output.h" |
53 | #include "langhooks.h" |
54 | #include "debug.h" |
55 | #include "common/common-target.h" |
56 | #include "stringpool.h" |
57 | #include "attribs.h" |
58 | #include "asan.h" |
59 | #include "rtl-iter.h" |
60 | #include "file-prefix-map.h" /* remap_debug_filename() */ |
61 | #include "alloc-pool.h" |
62 | #include "toplev.h" |
63 | #include "opts.h" |
64 | #include "asan.h" |
65 | |
/* The (assembler) name of the first globally-visible object output.  */
extern GTY(()) const char *first_global_object_name;
/* Likewise, the name of the first weak global output, used as a
   fallback identifier when no non-weak global was seen first.  */
extern GTY(()) const char *weak_global_object_name;

const char *first_global_object_name;
const char *weak_global_object_name;

/* Opaque types defined later in this file.  */
class addr_const;
class constant_descriptor_rtx;
struct rtx_constant_pool;

/* Count of constants deferred for output, kept per function in crtl.  */
#define n_deferred_constants (crtl->varasm.deferred_constants)

/* Number for making the label on the next
   constant that is stored in memory.  */

static GTY(()) int const_labelno;

/* Carry information from ASM_DECLARE_OBJECT_NAME
   to ASM_FINISH_DECLARE_OBJECT.  */

int size_directive_output;

/* The last decl for which assemble_variable was called,
   if it did ASM_DECLARE_OBJECT_NAME.
   If the last call to assemble_variable didn't do that,
   this holds 0.  */

tree last_assemble_variable_decl;

/* The following global variable indicates if the first basic block
   in a function belongs to the cold partition or not.  */

bool first_function_block_is_cold;

/* Whether we saw any functions with no_split_stack.  */

static bool saw_no_split_stack;
104 | |
/* Forward declarations of file-local helper routines defined below.  */
static const char *strip_reg_name (const char *);
static bool contains_pointers_p (tree);
#ifdef ASM_OUTPUT_EXTERNAL
static bool incorporeal_function_p (tree);
#endif
static void decode_addr_const (tree, class addr_const *);
static hashval_t const_hash_1 (const tree);
static bool compare_constant (const tree, const tree);
static void output_constant_def_contents (rtx);
static void output_addressed_constants (tree, int);
static unsigned HOST_WIDE_INT output_constant (tree, unsigned HOST_WIDE_INT,
					       unsigned int, bool, bool);
static void globalize_decl (tree);
static bool decl_readonly_section_1 (enum section_category);
#ifdef BSS_SECTION_ASM_OP
#ifdef ASM_OUTPUT_ALIGNED_BSS
static void asm_output_aligned_bss (FILE *, tree, const char *,
				    unsigned HOST_WIDE_INT, int)
     ATTRIBUTE_UNUSED;
#endif
#endif /* BSS_SECTION_ASM_OP */
static void mark_weak (tree);
static void output_constant_pool (const char *, tree);
static void handle_vtv_comdat_section (section *, const_tree);
129 | |
/* Well-known sections, each one associated with some sort of *_ASM_OP.
   These are initialized by the target's init_sections hook.  */
section *text_section;
section *data_section;
section *readonly_data_section;
section *sdata_section;
section *ctors_section;
section *dtors_section;
section *bss_section;
section *sbss_section;

/* Various forms of common section.  All are guaranteed to be nonnull.  */
section *tls_comm_section;
section *comm_section;
section *lcomm_section;

/* A SECTION_NOSWITCH section used for declaring global BSS variables.
   May be null.  */
section *bss_noswitch_section;

/* The section that holds the main exception table, when known.  The section
   is set either by the target's init_sections hook or by the first call to
   switch_to_exception_section.  */
section *exception_section;

/* The section that holds the DWARF2 frame unwind information, when known.
   The section is set either by the target's init_sections hook or by the
   first call to switch_to_eh_frame_section.  */
section *eh_frame_section;

/* asm_out_file's current section.  This is NULL if no section has yet
   been selected or if we lose track of what the current section is.  */
section *in_section;

/* True if code for the current function is currently being directed
   at the cold section.  */
bool in_cold_section_p;

/* The following global holds the "function name" for the code in the
   cold section of a function, if hot/cold function splitting is enabled
   and there was actually code that went into the cold section.  A
   pseudo function name is needed for the cold section of code for some
   debugging tools that perform symbolization.  */
tree cold_function_name = NULL_TREE;

/* A linked list of all the unnamed sections.  */
static GTY(()) section *unnamed_sections;
176 | |
/* Return a nonzero value if DECL has a section attribute.  */
#define IN_NAMED_SECTION(DECL) \
  (VAR_OR_FUNCTION_DECL_P (DECL) && DECL_SECTION_NAME (DECL) != NULL)

/* Hash descriptor for named sections: entries are compared against a
   section name string.  */
struct section_hasher : ggc_ptr_hash<section>
{
  typedef const char *compare_type;

  static hashval_t hash (section *);
  static bool equal (section *, const char *);
};

/* Hash table of named sections.  */
static GTY(()) hash_table<section_hasher> *section_htab;

/* Hash descriptor for object_blocks: entries are compared against the
   section they belong to.  */
struct object_block_hasher : ggc_ptr_hash<object_block>
{
  typedef const section *compare_type;

  static hashval_t hash (object_block *);
  static bool equal (object_block *, const section *);
};

/* A table of object_blocks, indexed by section.  */
static GTY(()) hash_table<object_block_hasher> *object_block_htab;

/* The next number to use for internal anchor labels.  */
static GTY(()) int anchor_labelno;

/* A pool of constants that can be shared between functions.  */
static GTY(()) struct rtx_constant_pool *shared_constant_pool;
208 | |
209 | /* Helper routines for maintaining section_htab. */ |
210 | |
211 | bool |
212 | section_hasher::equal (section *old, const char *new_name) |
213 | { |
214 | return strcmp (s1: old->named.name, s2: new_name) == 0; |
215 | } |
216 | |
217 | hashval_t |
218 | section_hasher::hash (section *old) |
219 | { |
220 | return htab_hash_string (old->named.name); |
221 | } |
222 | |
223 | /* Return a hash value for section SECT. */ |
224 | |
225 | static hashval_t |
226 | hash_section (section *sect) |
227 | { |
228 | if (sect->common.flags & SECTION_NAMED) |
229 | return htab_hash_string (sect->named.name); |
230 | return sect->common.flags & ~SECTION_DECLARED; |
231 | } |
232 | |
233 | /* Helper routines for maintaining object_block_htab. */ |
234 | |
235 | inline bool |
236 | object_block_hasher::equal (object_block *old, const section *new_section) |
237 | { |
238 | return old->sect == new_section; |
239 | } |
240 | |
241 | hashval_t |
242 | object_block_hasher::hash (object_block *old) |
243 | { |
244 | return hash_section (sect: old->sect); |
245 | } |
246 | |
247 | /* Return a new unnamed section with the given fields. */ |
248 | |
249 | section * |
250 | get_unnamed_section (unsigned int flags, void (*callback) (const char *), |
251 | const char *data) |
252 | { |
253 | section *sect; |
254 | |
255 | sect = ggc_alloc<section> (); |
256 | sect->unnamed.common.flags = flags | SECTION_UNNAMED; |
257 | sect->unnamed.callback = callback; |
258 | sect->unnamed.data = data; |
259 | sect->unnamed.next = unnamed_sections; |
260 | |
261 | unnamed_sections = sect; |
262 | return sect; |
263 | } |
264 | |
265 | /* Return a SECTION_NOSWITCH section with the given fields. */ |
266 | |
267 | static section * |
268 | get_noswitch_section (unsigned int flags, noswitch_section_callback callback) |
269 | { |
270 | section *sect; |
271 | |
272 | sect = ggc_alloc<section> (); |
273 | sect->noswitch.common.flags = flags | SECTION_NOSWITCH; |
274 | sect->noswitch.callback = callback; |
275 | |
276 | return sect; |
277 | } |
278 | |
279 | /* Return the named section structure associated with NAME. Create |
280 | a new section with the given fields if no such structure exists. |
281 | When NOT_EXISTING, then fail if the section already exists. Return |
282 | the existing section if the SECTION_RETAIN bit doesn't match. Set |
283 | the SECTION_WRITE | SECTION_RELRO bits on the existing section |
284 | if one of the section flags is SECTION_WRITE | SECTION_RELRO and the |
285 | other has none of these flags in named sections and either the section |
286 | hasn't been declared yet or has been declared as writable. */ |
287 | |
section *
get_section (const char *name, unsigned int flags, tree decl,
	     bool not_existing)
{
  section *sect, **slot;

  /* Find (or create) the hash-table slot for NAME.  */
  slot = section_htab->find_slot_with_hash (comparable: name, hash: htab_hash_string (name),
					    insert: INSERT);
  flags |= SECTION_NAMED;
  /* Decls carrying the "retain" attribute get the SECTION_RETAIN bit.  */
  if (decl != nullptr
      && DECL_P (decl)
      && lookup_attribute (attr_name: "retain" , DECL_ATTRIBUTES (decl)))
    flags |= SECTION_RETAIN;
  if (*slot == NULL)
    {
      /* First time this name is seen: build a fresh named section.  */
      sect = ggc_alloc<section> ();
      sect->named.common.flags = flags;
      sect->named.name = ggc_strdup (name);
      sect->named.decl = decl;
      *slot = sect;
    }
  else
    {
      if (not_existing)
	internal_error ("section already exists: %qs" , name);

      sect = *slot;
      /* It is fine if one of the sections has SECTION_NOTYPE as long as
	 the other has none of the contrary flags (see the logic at the end
	 of default_section_type_flags, below).  */
      if (((sect->common.flags ^ flags) & SECTION_NOTYPE)
	  && !((sect->common.flags | flags)
	       & (SECTION_CODE | SECTION_BSS | SECTION_TLS | SECTION_ENTSIZE
		  | (HAVE_COMDAT_GROUP ? SECTION_LINKONCE : 0))))
	{
	  /* Reconcile by making both sides SECTION_NOTYPE.  */
	  sect->common.flags |= SECTION_NOTYPE;
	  flags |= SECTION_NOTYPE;
	}
      /* Flags disagree (ignoring SECTION_DECLARED) and we have not
	 already reported a conflict for this section.  */
      if ((sect->common.flags & ~SECTION_DECLARED) != flags
	  && ((sect->common.flags | flags) & SECTION_OVERRIDE) == 0)
	{
	  /* It is fine if one of the section flags is
	     SECTION_WRITE | SECTION_RELRO and the other has none of these
	     flags (i.e. read-only) in named sections and either the
	     section hasn't been declared yet or has been declared as writable.
	     In that case just make sure the resulting flags are
	     SECTION_WRITE | SECTION_RELRO, ie. writable only because of
	     relocations.  */
	  if (((sect->common.flags ^ flags) & (SECTION_WRITE | SECTION_RELRO))
	      == (SECTION_WRITE | SECTION_RELRO)
	      && (sect->common.flags
		  & ~(SECTION_DECLARED | SECTION_WRITE | SECTION_RELRO))
	      == (flags & ~(SECTION_WRITE | SECTION_RELRO))
	      && ((sect->common.flags & SECTION_DECLARED) == 0
		  || (sect->common.flags & SECTION_WRITE)))
	    {
	      sect->common.flags |= (SECTION_WRITE | SECTION_RELRO);
	      return sect;
	    }
	  /* If the SECTION_RETAIN bit doesn't match, return and switch
	     to a new section later.  */
	  if ((sect->common.flags & SECTION_RETAIN)
	      != (flags & SECTION_RETAIN))
	    return sect;
	  /* Sanity check user variables for flag changes.  Prefer the
	     error message that names both conflicting decls when we
	     know them.  */
	  if (sect->named.decl != NULL
	      && DECL_P (sect->named.decl)
	      && decl != sect->named.decl)
	    {
	      if (decl != NULL && DECL_P (decl))
		error ("%+qD causes a section type conflict with %qD" ,
		       decl, sect->named.decl);
	      else
		error ("section type conflict with %qD" , sect->named.decl);
	      inform (DECL_SOURCE_LOCATION (sect->named.decl),
		      "%qD was declared here" , sect->named.decl);
	    }
	  else if (decl != NULL && DECL_P (decl))
	    error ("%+qD causes a section type conflict" , decl);
	  else
	    error ("section type conflict" );
	  /* Make sure we don't error about one section multiple times.  */
	  sect->common.flags |= SECTION_OVERRIDE;
	}
    }
  return sect;
}
375 | |
376 | /* Return true if the current compilation mode benefits from having |
377 | objects grouped into blocks. */ |
378 | |
379 | static bool |
380 | use_object_blocks_p (void) |
381 | { |
382 | return flag_section_anchors; |
383 | } |
384 | |
385 | /* Return the object_block structure for section SECT. Create a new |
386 | structure if we haven't created one already. Return null if SECT |
387 | itself is null. Return also null for mergeable sections since |
388 | section anchors can't be used in mergeable sections anyway, |
389 | because the linker might move objects around, and using the |
390 | object blocks infrastructure in that case is both a waste and a |
391 | maintenance burden. */ |
392 | |
393 | static struct object_block * |
394 | get_block_for_section (section *sect) |
395 | { |
396 | struct object_block *block; |
397 | |
398 | if (sect == NULL) |
399 | return NULL; |
400 | |
401 | if (sect->common.flags & SECTION_MERGE) |
402 | return NULL; |
403 | |
404 | object_block **slot |
405 | = object_block_htab->find_slot_with_hash (comparable: sect, hash: hash_section (sect), |
406 | insert: INSERT); |
407 | block = *slot; |
408 | if (block == NULL) |
409 | { |
410 | block = ggc_cleared_alloc<object_block> (); |
411 | block->sect = sect; |
412 | *slot = block; |
413 | } |
414 | return block; |
415 | } |
416 | |
417 | /* Create a symbol with label LABEL and place it at byte offset |
418 | OFFSET in BLOCK. OFFSET can be negative if the symbol's offset |
419 | is not yet known. LABEL must be a garbage-collected string. */ |
420 | |
static rtx
create_block_symbol (const char *label, struct object_block *block,
		     HOST_WIDE_INT offset)
{
  rtx symbol;
  unsigned int size;

  /* Create the extended SYMBOL_REF.  A block symbol carries a
     block_symbol payload after the ordinary rtx header, so it is
     allocated by hand rather than through gen_rtx_SYMBOL_REF.  */
  size = RTX_HDR_SIZE + sizeof (struct block_symbol);
  symbol = (rtx) ggc_internal_alloc (s: size);

  /* Initialize the normal SYMBOL_REF fields.  */
  memset (s: symbol, c: 0, n: size);
  PUT_CODE (symbol, SYMBOL_REF);
  PUT_MODE (x: symbol, Pmode);
  XSTR (symbol, 0) = label;
  /* Mark the symbol so SYMBOL_REF_BLOCK and friends are valid on it.  */
  SYMBOL_REF_FLAGS (symbol) = SYMBOL_FLAG_HAS_BLOCK_INFO;

  /* Initialize the block_symbol stuff.  */
  SYMBOL_REF_BLOCK (symbol) = block;
  SYMBOL_REF_BLOCK_OFFSET (symbol) = offset;

  return symbol;
}
445 | |
446 | /* Return a section with a particular name and with whatever SECTION_* |
447 | flags section_type_flags deems appropriate. The name of the section |
448 | is taken from NAME if nonnull, otherwise it is taken from DECL's |
449 | DECL_SECTION_NAME. DECL is the decl associated with the section |
450 | (see the section comment for details) and RELOC is as for |
451 | section_type_flags. */ |
452 | |
453 | section * |
454 | get_named_section (tree decl, const char *name, int reloc) |
455 | { |
456 | unsigned int flags; |
457 | |
458 | if (name == NULL) |
459 | { |
460 | gcc_assert (decl && DECL_P (decl) && DECL_SECTION_NAME (decl)); |
461 | name = DECL_SECTION_NAME (decl); |
462 | } |
463 | |
464 | flags = targetm.section_type_flags (decl, name, reloc); |
465 | return get_section (name, flags, decl); |
466 | } |
467 | |
468 | /* Worker for resolve_unique_section. */ |
469 | |
470 | static bool |
471 | set_implicit_section (struct symtab_node *n, void *data ATTRIBUTE_UNUSED) |
472 | { |
473 | n->implicit_section = true; |
474 | return false; |
475 | } |
476 | |
477 | /* If required, set DECL_SECTION_NAME to a unique name. */ |
478 | |
void
resolve_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED,
			int flag_function_or_data_sections)
{
  /* Only act when DECL has no section yet, the target supports named
     sections, and there is a reason to give DECL its own section:
     -ffunction-sections/-fdata-sections, a "retain" attribute, or
     membership in a COMDAT group.  */
  if (DECL_SECTION_NAME (decl) == NULL
      && targetm_common.have_named_sections
      && (flag_function_or_data_sections
	  || lookup_attribute (attr_name: "retain" , DECL_ATTRIBUTES (decl))
	  || DECL_COMDAT_GROUP (decl)))
    {
      targetm.asm_out.unique_section (decl, reloc);
      /* If the target assigned a name, mark DECL and all its aliases
	 as being in an implicit (compiler-chosen) section.  */
      if (DECL_SECTION_NAME (decl))
	symtab_node::get (decl)->call_for_symbol_and_aliases
	  (callback: set_implicit_section, NULL, include_overwritable: true);
    }
}
495 | |
496 | #ifdef BSS_SECTION_ASM_OP |
497 | |
498 | #ifdef ASM_OUTPUT_ALIGNED_BSS |
499 | |
500 | /* Utility function for targets to use in implementing |
501 | ASM_OUTPUT_ALIGNED_BSS. |
502 | ??? It is believed that this function will work in most cases so such |
503 | support is localized here. */ |
504 | |
static void
asm_output_aligned_bss (FILE *file, tree decl ATTRIBUTE_UNUSED,
			const char *name, unsigned HOST_WIDE_INT size,
			int align)
{
  /* Emit NAME into .bss with the requested alignment (ALIGN is in
     bits; ASM_OUTPUT_ALIGN wants a log2 byte count).  */
  switch_to_section (bss_section);
  ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
#ifdef ASM_DECLARE_OBJECT_NAME
  last_assemble_variable_decl = decl;
  ASM_DECLARE_OBJECT_NAME (file, name, decl);
#else
  /* Standard thing is just output label for the object.  */
  ASM_OUTPUT_LABEL (file, name);
#endif /* ASM_DECLARE_OBJECT_NAME */
  /* Reserve the object's space; a zero-sized object still gets one
     byte so its label stays distinct from the next symbol.  */
  ASM_OUTPUT_SKIP (file, size ? size : 1);
}
521 | |
522 | #endif |
523 | |
524 | #endif /* BSS_SECTION_ASM_OP */ |
525 | |
526 | #ifndef USE_SELECT_SECTION_FOR_FUNCTIONS |
527 | /* Return the hot section for function DECL. Return text_section for |
528 | null DECLs. */ |
529 | |
530 | static section * |
531 | hot_function_section (tree decl) |
532 | { |
533 | if (decl != NULL_TREE |
534 | && DECL_SECTION_NAME (decl) != NULL |
535 | && targetm_common.have_named_sections) |
536 | return get_named_section (decl, NULL, reloc: 0); |
537 | else |
538 | return text_section; |
539 | } |
540 | #endif |
541 | |
542 | /* Return section for TEXT_SECTION_NAME if DECL or DECL_SECTION_NAME (DECL) |
543 | is NULL. |
544 | |
545 | When DECL_SECTION_NAME is non-NULL and it is implicit section and |
546 | NAMED_SECTION_SUFFIX is non-NULL, then produce section called |
547 | concatenate the name with NAMED_SECTION_SUFFIX. |
548 | Otherwise produce "TEXT_SECTION_NAME.IMPLICIT_NAME". */ |
549 | |
section *
get_named_text_section (tree decl,
			const char *text_section_name,
			const char *named_section_suffix)
{
  if (decl && DECL_SECTION_NAME (decl))
    {
      if (named_section_suffix)
	{
	  /* Append NAMED_SECTION_SUFFIX to DECL's section name, after
	     stripping any target-specific name encoding.  */
	  const char *dsn = DECL_SECTION_NAME (decl);
	  const char *stripped_name;
	  char *name, *buffer;

	  /* Work on a stack copy; strip_name_encoding may not be
	     usable on the original string.  */
	  name = (char *) alloca (strlen (dsn) + 1);
	  memcpy (dest: name, src: dsn,
		  n: strlen (s: dsn) + 1);

	  stripped_name = targetm.strip_name_encoding (name);

	  buffer = ACONCAT ((stripped_name, named_section_suffix, NULL));
	  return get_named_section (decl, name: buffer, reloc: 0);
	}
      else if (symtab_node::get (decl)->implicit_section)
	{
	  const char *name;

	  /* Do not try to split gnu_linkonce functions.  This gets somewhat
	     slipperly.  */
	  if (DECL_COMDAT_GROUP (decl) && !HAVE_COMDAT_GROUP)
	    return NULL;
	  /* Produce "TEXT_SECTION_NAME.<assembler name>".  */
	  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
	  name = targetm.strip_name_encoding (name);
	  return get_named_section (decl, ACONCAT ((text_section_name, "." ,
						    name, NULL)), reloc: 0);
	}
      else
	return NULL;
    }
  /* No user-specified section: just use TEXT_SECTION_NAME itself.  */
  return get_named_section (decl, name: text_section_name, reloc: 0);
}
590 | |
591 | /* Choose named function section based on its frequency. */ |
592 | |
section *
default_function_section (tree decl, enum node_frequency freq,
			  bool startup, bool exit)
{
#if defined HAVE_LD_EH_GC_SECTIONS && defined HAVE_LD_EH_GC_SECTIONS_BUG
  /* Old GNU linkers have buggy --gc-section support, which sometimes
     results in .gcc_except_table* sections being garbage collected.  */
  if (decl
      && symtab_node::get (decl)->implicit_section)
    return NULL;
#endif

  if (!flag_reorder_functions
      || !targetm_common.have_named_sections)
    return NULL;
  /* Startup code should go to startup subsection unless it is
     unlikely executed (this happens especially with function splitting
     where we can split away unnecessary parts of static constructors.  */
  if (startup && freq != NODE_FREQUENCY_UNLIKELY_EXECUTED)
    {
      /* During LTO the tp_first_run profiling will naturally place all
	 initialization code first.  Using separate section is counter-productive
	 because startup only code may call functions which are no longer
	 startup only.  */
      if (!in_lto_p
	  || !cgraph_node::get (decl)->tp_first_run
	  || !opt_for_fn (decl, flag_profile_reorder_functions))
	return get_named_text_section (decl, text_section_name: ".text.startup" , NULL);
      else
	return NULL;
    }

  /* Similarly for exit.  */
  if (exit && freq != NODE_FREQUENCY_UNLIKELY_EXECUTED)
    return get_named_text_section (decl, text_section_name: ".text.exit" , NULL);

  /* Group cold functions together, similarly for hot code.  */
  switch (freq)
    {
    case NODE_FREQUENCY_UNLIKELY_EXECUTED:
      return get_named_text_section (decl, text_section_name: ".text.unlikely" , NULL);
    case NODE_FREQUENCY_HOT:
      return get_named_text_section (decl, text_section_name: ".text.hot" , NULL);
    /* NODE_FREQUENCY_NORMAL and NODE_FREQUENCY_EXECUTED_ONCE stay in
       the default text section.  */
    default:
      return NULL;
    }
}
641 | |
642 | /* Return the section for function DECL. |
643 | |
644 | If DECL is NULL_TREE, return the text section. We can be passed |
645 | NULL_TREE under some circumstances by dbxout.cc at least. |
646 | |
647 | If FORCE_COLD is true, return cold function section ignoring |
648 | the frequency info of cgraph_node. */ |
649 | |
static section *
function_section_1 (tree decl, bool force_cold)
{
  section *section = NULL;
  enum node_frequency freq = NODE_FREQUENCY_NORMAL;
  bool startup = false, exit = false;

  /* Pull frequency / startup / exit info from DECL's cgraph node,
     if it has one.  */
  if (decl)
    {
      struct cgraph_node *node = cgraph_node::get (decl);

      if (node)
	{
	  freq = node->frequency;
	  startup = node->only_called_at_startup;
	  exit = node->only_called_at_exit;
	}
    }
  /* FORCE_COLD overrides whatever the profile said.  */
  if (force_cold)
    freq = NODE_FREQUENCY_UNLIKELY_EXECUTED;

#ifdef USE_SELECT_SECTION_FOR_FUNCTIONS
  if (decl != NULL_TREE
      && DECL_SECTION_NAME (decl) != NULL)
    {
      if (targetm.asm_out.function_section)
	section = targetm.asm_out.function_section (decl, freq,
						    startup, exit);
      if (section)
	return section;
      return get_named_section (decl, NULL, 0);
    }
  else
    /* NOTE(review): this branch calls symtab_node::get (decl) even when
       DECL is NULL_TREE; presumably targets defining
       USE_SELECT_SECTION_FOR_FUNCTIONS never reach here with a null
       decl -- confirm before relying on it.  */
    return targetm.asm_out.select_section
      (decl, freq == NODE_FREQUENCY_UNLIKELY_EXECUTED,
       symtab_node::get (decl)->definition_alignment ());
#else
  if (targetm.asm_out.function_section)
    section = targetm.asm_out.function_section (decl, freq, startup, exit);
  if (section)
    return section;
  return hot_function_section (decl);
#endif
}
694 | |
695 | /* Return the section for function DECL. |
696 | |
697 | If DECL is NULL_TREE, return the text section. We can be passed |
698 | NULL_TREE under some circumstances by dbxout.cc at least. */ |
699 | |
700 | section * |
701 | function_section (tree decl) |
702 | { |
703 | /* Handle cases where function splitting code decides |
704 | to put function entry point into unlikely executed section |
705 | despite the fact that the function itself is not cold |
706 | (i.e. it is called rarely but contains a hot loop that is |
707 | better to live in hot subsection for the code locality). */ |
708 | return function_section_1 (decl, |
709 | force_cold: first_function_block_is_cold); |
710 | } |
711 | |
712 | /* Return the section for the current function, take IN_COLD_SECTION_P |
713 | into account. */ |
714 | |
715 | section * |
716 | current_function_section (void) |
717 | { |
718 | return function_section_1 (decl: current_function_decl, force_cold: in_cold_section_p); |
719 | } |
720 | |
721 | /* Tell assembler to switch to unlikely-to-be-executed text section. */ |
722 | |
723 | section * |
724 | unlikely_text_section (void) |
725 | { |
726 | return function_section_1 (decl: current_function_decl, force_cold: true); |
727 | } |
728 | |
729 | /* When called within a function context, return true if the function |
730 | has been assigned a cold text section and if SECT is that section. |
731 | When called outside a function context, return true if SECT is the |
732 | default cold section. */ |
733 | |
734 | bool |
735 | unlikely_text_section_p (section *sect) |
736 | { |
737 | return sect == function_section_1 (decl: current_function_decl, force_cold: true); |
738 | } |
739 | |
740 | /* Switch to the other function partition (if inside of hot section |
741 | into cold section, otherwise into the hot section). */ |
742 | |
void
switch_to_other_text_partition (void)
{
  /* Flip the partition flag; current_function_section () then resolves
     to the other (hot vs. cold) section, so switch to it.  */
  in_cold_section_p = !in_cold_section_p;
  switch_to_section (current_function_section ());
}
749 | |
750 | /* Return the read-only or relocated read-only data section |
751 | associated with function DECL. */ |
752 | |
section *
default_function_rodata_section (tree decl, bool relocatable)
{
  const char* sname;
  unsigned int flags;

  flags = 0;

  /* Pick the base name: .data.rel.ro.local (writable only because of
     relocations) when RELOCATABLE, plain .rodata otherwise.  */
  if (relocatable)
    {
      sname = ".data.rel.ro.local" ;
      flags = (SECTION_WRITE | SECTION_RELRO);
    }
  else
    sname = ".rodata" ;

  if (decl && DECL_SECTION_NAME (decl))
    {
      const char *name = DECL_SECTION_NAME (decl);

      if (DECL_COMDAT_GROUP (decl) && HAVE_COMDAT_GROUP)
	{
	  /* COMDAT case: reuse the function section's ".foo" suffix,
	     producing e.g. SNAME.foo, marked linkonce.  */
	  const char *dot;
	  size_t len;
	  char* rname;

	  dot = strchr (s: name + 1, c: '.');
	  if (!dot)
	    dot = name;
	  len = strlen (s: dot) + strlen (s: sname) + 1;
	  rname = (char *) alloca (len);

	  strcpy (dest: rname, src: sname);
	  strcat (dest: rname, src: dot);
	  return get_section (name: rname, flags: (SECTION_LINKONCE | flags), decl);
	}
      /* For .gnu.linkonce.t.foo we want to use .gnu.linkonce.r.foo or
	 .gnu.linkonce.d.rel.ro.local.foo if the jump table is relocatable.  */
      else if (DECL_COMDAT_GROUP (decl)
	       && startswith (str: name, prefix: ".gnu.linkonce.t." ))
	{
	  size_t len;
	  char *rname;

	  if (relocatable)
	    {
	      len = strlen (s: name) + strlen (s: ".rel.ro.local" ) + 1;
	      rname = (char *) alloca (len);

	      /* NAME + 15 skips ".gnu.linkonce.t", keeping the final
		 "." and the "foo" suffix.  */
	      strcpy (dest: rname, src: ".gnu.linkonce.d.rel.ro.local" );
	      strcat (dest: rname, src: name + 15);
	    }
	  else
	    {
	      len = strlen (s: name) + 1;
	      rname = (char *) alloca (len);

	      memcpy (dest: rname, src: name, n: len);
	      /* Index 14 is the 't' of ".gnu.linkonce.t."; make it 'r'.  */
	      rname[14] = 'r';
	    }
	  return get_section (name: rname, flags: (SECTION_LINKONCE | flags), decl);
	}
      /* For .text.foo we want to use .rodata.foo.  */
      else if (flag_function_sections && flag_data_sections
	       && startswith (str: name, prefix: ".text." ))
	{
	  size_t len = strlen (s: name) + 1;
	  char *rname = (char *) alloca (len + strlen (sname) - 5);

	  /* NAME + 5 starts at the second '.' of ".text.", so the
	     result is SNAME followed by ".foo".  */
	  memcpy (dest: rname, src: sname, n: strlen (s: sname));
	  memcpy (dest: rname + strlen (s: sname), src: name + 5, n: len - 5);
	  return get_section (name: rname, flags, decl);
	}
    }

  /* No per-function section: .data.rel.ro.local must still be looked
     up by name, while plain read-only data has a well-known section.  */
  if (relocatable)
    return get_section (name: sname, flags, decl);
  else
    return readonly_data_section;
}
833 | |
834 | /* Return the read-only data section associated with function DECL |
835 | for targets where that section should be always the single |
836 | readonly data section. */ |
837 | |
section *
default_no_function_rodata_section (tree, bool)
{
  /* Such targets keep a single shared read-only data section, so both
     parameters (the decl and the relocatable flag) are ignored.  */
  return readonly_data_section;
}
843 | |
844 | /* A subroutine of mergeable_string_section and mergeable_constant_section. */ |
845 | |
846 | static const char * |
847 | function_mergeable_rodata_prefix (void) |
848 | { |
849 | section *s = targetm.asm_out.function_rodata_section (current_function_decl, |
850 | false); |
851 | if (SECTION_STYLE (s) == SECTION_NAMED) |
852 | return s->named.name; |
853 | else |
854 | return targetm.asm_out.mergeable_rodata_prefix; |
855 | } |
856 | |
857 | /* Return the section to use for string merging. */ |
858 | |
static section *
mergeable_string_section (tree decl ATTRIBUTE_UNUSED,
			  unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED,
			  unsigned int flags ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT len;

  /* Only string constants whose declared size matches their string
     length exactly qualify for a mergeable .str section.  */
  if (HAVE_GAS_SHF_MERGE && flag_merge_constants
      && TREE_CODE (decl) == STRING_CST
      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
      && align <= 256
      && (len = int_size_in_bytes (TREE_TYPE (decl))) > 0
      && TREE_STRING_LENGTH (decl) == len)
    {
      scalar_int_mode mode;
      unsigned int modesize;
      const char *str;
      HOST_WIDE_INT i;
      int j, unit;
      const char *prefix = function_mergeable_rodata_prefix ();
      char *name = (char *) alloca (strlen (prefix) + 30);

      /* The element mode sets the merge entity size; it must be a
	 power of two between 8 and 256 bits.  */
      mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (TREE_TYPE (decl)));
      modesize = GET_MODE_BITSIZE (mode);
      if (modesize >= 8 && modesize <= 256
	  && (modesize & (modesize - 1)) == 0)
	{
	  if (align < modesize)
	    align = modesize;

	  /* Without linker support for aligned SHF_MERGE, only
	     byte-aligned mergeable strings are safe.  */
	  if (!HAVE_LD_ALIGNED_SHF_MERGE && align > 8)
	    return readonly_data_section;

	  str = TREE_STRING_POINTER (decl);
	  unit = GET_MODE_SIZE (mode);

	  /* Check for embedded NUL characters.  */
	  for (i = 0; i < len; i += unit)
	    {
	      for (j = 0; j < unit; j++)
		if (str[i + j] != '\0')
		  break;
	      if (j == unit)
		break;
	    }
	  /* Accept only when the sole NUL element is the terminator at
	     the very end of the string.  */
	  if (i == len - unit || (unit == 1 && i == len))
	    {
	      sprintf (s: name, format: "%s.str%d.%d" , prefix,
		       modesize / 8, (int) (align / 8));
	      flags |= (modesize / 8) | SECTION_MERGE | SECTION_STRINGS;
	      return get_section (name, flags, NULL);
	    }
	}
    }

  return readonly_data_section;
}
916 | |
917 | /* Return the section to use for constant merging. */ |
918 | |
919 | section * |
920 | mergeable_constant_section (machine_mode mode ATTRIBUTE_UNUSED, |
921 | unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED, |
922 | unsigned int flags ATTRIBUTE_UNUSED) |
923 | { |
924 | if (HAVE_GAS_SHF_MERGE && flag_merge_constants |
925 | && mode != VOIDmode |
926 | && mode != BLKmode |
927 | && known_le (GET_MODE_BITSIZE (mode), align) |
928 | && align >= 8 |
929 | && align <= 256 |
930 | && (align & (align - 1)) == 0 |
931 | && (HAVE_LD_ALIGNED_SHF_MERGE ? 1 : align == 8)) |
932 | { |
933 | const char *prefix = function_mergeable_rodata_prefix (); |
934 | char *name = (char *) alloca (strlen (prefix) + 30); |
935 | |
936 | sprintf (s: name, format: "%s.cst%d" , prefix, (int) (align / 8)); |
937 | flags |= (align / 8) | SECTION_MERGE; |
938 | return get_section (name, flags, NULL); |
939 | } |
940 | return readonly_data_section; |
941 | } |
942 | |
943 | /* Given NAME, a putative register name, discard any customary prefixes. */ |
944 | |
/* Given NAME, a putative register name, return a pointer past any
   customary prefixes: the target's REGISTER_PREFIX, if defined,
   followed by at most one '%' or '#' character.  */

static const char *
strip_reg_name (const char *name)
{
  const char *p = name;

#ifdef REGISTER_PREFIX
  /* Skip the target-specific register prefix, when present.  */
  size_t prefix_len = strlen (REGISTER_PREFIX);
  if (strncmp (p, REGISTER_PREFIX, prefix_len) == 0)
    p += prefix_len;
#endif

  /* Skip a single customary '%' or '#' prefix character.  */
  switch (p[0])
    {
    case '%':
    case '#':
      return p + 1;
    default:
      return p;
    }
}
956 | |
957 | /* The user has asked for a DECL to have a particular name. Set (or |
958 | change) it in such a way that we don't prefix an underscore to |
959 | it. */ |
960 | void |
961 | set_user_assembler_name (tree decl, const char *name) |
962 | { |
963 | char *starred = (char *) alloca (strlen (name) + 2); |
964 | starred[0] = '*'; |
965 | strcpy (dest: starred + 1, src: name); |
966 | symtab->change_decl_assembler_name (decl, get_identifier (starred)); |
967 | SET_DECL_RTL (decl, NULL_RTX); |
968 | } |
969 | |
970 | /* Decode an `asm' spec for a declaration as a register name. |
971 | Return the register number, or -1 if nothing specified, |
972 | or -2 if the ASMSPEC is not `cc' or `memory' and is not recognized, |
973 | or -3 if ASMSPEC is `cc' and is not recognized, |
974 | or -4 if ASMSPEC is `memory' and is not recognized. |
975 | Accept an exact spelling or a decimal number. |
976 | Prefixes such as % are optional. */ |
977 | |
int
decode_reg_name_and_count (const char *asmspec, int *pnregs)
{
  /* Presume just one register is clobbered.  */
  *pnregs = 1;

  if (asmspec != 0)
    {
      int i;

      /* Get rid of confusing prefixes.  */
      asmspec = strip_reg_name (name: asmspec);

      /* Allow a decimal number as a "register name".  The scan runs
	 from the end so I < 0 afterwards means every character was a
	 digit.  */
      for (i = strlen (s: asmspec) - 1; i >= 0; i--)
	if (! ISDIGIT (asmspec[i]))
	  break;
      if (asmspec[0] != 0 && i < 0)
	{
	  i = atoi (nptr: asmspec);
	  /* The number must denote an existing hard register (one
	     with a non-empty name on this target).  */
	  if (i < FIRST_PSEUDO_REGISTER && i >= 0 && reg_names[i][0])
	    return i;
	  else
	    return -2;
	}

      /* Try an exact match against the target's register names,
	 comparing with prefixes stripped on both sides.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (reg_names[i][0]
	    && ! strcmp (s1: asmspec, s2: strip_reg_name (reg_names[i])))
	  return i;

#ifdef OVERLAPPING_REGISTER_NAMES
      /* Names that denote a group of registers; these also set the
	 clobber count through *PNREGS.  */
      {
	static const struct
	{
	  const char *const name;
	  const int number;
	  const int nregs;
	} table[] = OVERLAPPING_REGISTER_NAMES;

	for (i = 0; i < (int) ARRAY_SIZE (table); i++)
	  if (table[i].name[0]
	      && ! strcmp (asmspec, table[i].name))
	    {
	      *pnregs = table[i].nregs;
	      return table[i].number;
	    }
      }
#endif /* OVERLAPPING_REGISTER_NAMES */

#ifdef ADDITIONAL_REGISTER_NAMES
      /* Target-defined aliases for single registers.  */
      {
	static const struct { const char *const name; const int number; } table[]
	  = ADDITIONAL_REGISTER_NAMES;

	for (i = 0; i < (int) ARRAY_SIZE (table); i++)
	  if (table[i].name[0]
	      && ! strcmp (s1: asmspec, s2: table[i].name)
	      && reg_names[table[i].number][0])
	    return table[i].number;
      }
#endif /* ADDITIONAL_REGISTER_NAMES */

      /* The distinguished names "memory" and "cc" get their own
	 error codes (see the function comment).  */
      if (!strcmp (s1: asmspec, s2: "memory" ))
	return -4;

      if (!strcmp (s1: asmspec, s2: "cc" ))
	return -3;

      return -2;
    }

  return -1;
}
1052 | |
1053 | int |
1054 | decode_reg_name (const char *name) |
1055 | { |
1056 | int count; |
1057 | return decode_reg_name_and_count (asmspec: name, pnregs: &count); |
1058 | } |
1059 | |
1060 | |
1061 | /* Return true if DECL's initializer is suitable for a BSS section. */ |
1062 | |
1063 | bool |
1064 | bss_initializer_p (const_tree decl, bool named) |
1065 | { |
1066 | /* Do not put non-common constants into the .bss section, they belong in |
1067 | a readonly section, except when NAMED is true. */ |
1068 | return ((!TREE_READONLY (decl) || DECL_COMMON (decl) || named) |
1069 | && (DECL_INITIAL (decl) == NULL |
1070 | /* In LTO we have no errors in program; error_mark_node is used |
1071 | to mark offlined constructors. */ |
1072 | || (DECL_INITIAL (decl) == error_mark_node |
1073 | && !in_lto_p) |
1074 | || (flag_zero_initialized_in_bss |
1075 | && initializer_zerop (DECL_INITIAL (decl)) |
1076 | /* A decl with the "persistent" attribute applied and |
1077 | explicitly initialized to 0 should not be treated as a BSS |
1078 | variable. */ |
1079 | && !DECL_PERSISTENT_P (decl)))); |
1080 | } |
1081 | |
1082 | /* Compute the alignment of variable specified by DECL. |
1083 | DONT_OUTPUT_DATA is from assemble_variable. */ |
1084 | |
void
align_variable (tree decl, bool dont_output_data)
{
  unsigned int align = DECL_ALIGN (decl);

  /* In the case of initializing an array whose length isn't specified,
     where we have not yet been able to do the layout,
     figure out the proper alignment now, from the element type.  */
  if (dont_output_data && DECL_SIZE (decl) == 0
      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
    align = MAX (align, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));

  /* Some object file formats have a maximum alignment which they support.
     In particular, a.out format supports a maximum alignment of 4.  */
  if (align > MAX_OFILE_ALIGNMENT)
    {
      error ("alignment of %q+D is greater than maximum object "
	     "file alignment %d" , decl,
	     MAX_OFILE_ALIGNMENT/BITS_PER_UNIT);
      align = MAX_OFILE_ALIGNMENT;
    }

  /* A user-specified alignment (DECL_USER_ALIGN) is taken as-is; only
     compiler-chosen alignments may be raised below.  */
  if (! DECL_USER_ALIGN (decl))
    {
#ifdef DATA_ABI_ALIGNMENT
      unsigned int data_abi_align
	= DATA_ABI_ALIGNMENT (TREE_TYPE (decl), align);
      /* For backwards compatibility, don't assume the ABI alignment for
	 TLS variables.  */
      if (! DECL_THREAD_LOCAL_P (decl) || data_abi_align <= BITS_PER_WORD)
	align = data_abi_align;
#endif

      /* On some machines, it is good to increase alignment sometimes.
	 But as DECL_ALIGN is used both for actually emitting the variable
	 and for code accessing the variable as guaranteed alignment, we
	 can only increase the alignment if it is a performance optimization
	 if the references to it must bind to the current definition.  */
      if (decl_binds_to_current_def_p (decl)
	  && !DECL_VIRTUAL_P (decl))
	{
#ifdef DATA_ALIGNMENT
	  unsigned int data_align = DATA_ALIGNMENT (TREE_TYPE (decl), align);
	  /* Don't increase alignment too much for TLS variables - TLS space
	     is too precious.  */
	  if (! DECL_THREAD_LOCAL_P (decl) || data_align <= BITS_PER_WORD)
	    align = data_align;
#endif
	  if (DECL_INITIAL (decl) != 0
	      /* In LTO we have no errors in program; error_mark_node is used
		 to mark offlined constructors.  */
	      && (in_lto_p || DECL_INITIAL (decl) != error_mark_node))
	    {
	      /* Let the target raise alignment based on the constant
		 initializer, e.g. to enable wider stores.  */
	      unsigned int const_align
		= targetm.constant_alignment (DECL_INITIAL (decl), align);
	      /* Don't increase alignment too much for TLS variables - TLS
		 space is too precious.  */
	      if (! DECL_THREAD_LOCAL_P (decl) || const_align <= BITS_PER_WORD)
		align = const_align;
	    }
	}
    }

  /* Reset the alignment in case we have made it tighter, so we can benefit
     from it in get_pointer_alignment.  */
  SET_DECL_ALIGN (decl, align);
}
1152 | |
1153 | /* Return DECL_ALIGN (decl), possibly increased for optimization purposes |
1154 | beyond what align_variable returned. */ |
1155 | |
static unsigned int
get_variable_align (tree decl)
{
  unsigned int align = DECL_ALIGN (decl);

  /* For user aligned vars or static vars align_variable already did
     everything.  */
  if (DECL_USER_ALIGN (decl) || !TREE_PUBLIC (decl))
    return align;

#ifdef DATA_ABI_ALIGNMENT
  /* align_variable skipped the ABI alignment for TLS variables (for
     backwards compatibility); apply it here for emission purposes.  */
  if (DECL_THREAD_LOCAL_P (decl))
    align = DATA_ABI_ALIGNMENT (TREE_TYPE (decl), align);
#endif

  /* For decls that bind to the current definition, align_variable
     did also everything, except for not assuming ABI required alignment
     of TLS variables.  For other vars, increase the alignment here
     as an optimization.  */
  if (!decl_binds_to_current_def_p (decl))
    {
      /* On some machines, it is good to increase alignment sometimes.  */
#ifdef DATA_ALIGNMENT
      unsigned int data_align = DATA_ALIGNMENT (TREE_TYPE (decl), align);
      /* Don't increase alignment too much for TLS variables - TLS space
	 is too precious.  */
      if (! DECL_THREAD_LOCAL_P (decl) || data_align <= BITS_PER_WORD)
	align = data_align;
#endif
      if (DECL_INITIAL (decl) != 0
	  /* In LTO we have no errors in program; error_mark_node is used
	     to mark offlined constructors.  */
	  && (in_lto_p || DECL_INITIAL (decl) != error_mark_node))
	{
	  unsigned int const_align
	    = targetm.constant_alignment (DECL_INITIAL (decl), align);
	  /* Don't increase alignment too much for TLS variables - TLS space
	     is too precious.  */
	  if (! DECL_THREAD_LOCAL_P (decl) || const_align <= BITS_PER_WORD)
	    align = const_align;
	}
    }

  return align;
}
1201 | |
1202 | /* Compute reloc for get_variable_section. The return value |
1203 | is a mask for which bit 1 indicates a global relocation, and bit 0 |
1204 | indicates a local relocation. */ |
1205 | |
1206 | int |
1207 | compute_reloc_for_var (tree decl) |
1208 | { |
1209 | int reloc; |
1210 | |
1211 | if (DECL_INITIAL (decl) == error_mark_node) |
1212 | reloc = contains_pointers_p (TREE_TYPE (decl)) ? 3 : 0; |
1213 | else if (DECL_INITIAL (decl)) |
1214 | reloc = compute_reloc_for_constant (DECL_INITIAL (decl)); |
1215 | else |
1216 | reloc = 0; |
1217 | |
1218 | return reloc; |
1219 | } |
1220 | |
1221 | /* Return the section into which the given VAR_DECL or CONST_DECL |
1222 | should be placed. PREFER_NOSWITCH_P is true if a noswitch |
1223 | section should be used wherever possible. */ |
1224 | |
section *
get_variable_section (tree decl, bool prefer_noswitch_p)
{
  addr_space_t as = ADDR_SPACE_GENERIC;
  int reloc;
  /* Resolve aliases so section decisions are made on the real
     definition.  */
  varpool_node *vnode = varpool_node::get (decl);
  if (vnode)
    {
      vnode = vnode->ultimate_alias_target ();
      decl = vnode->decl;
    }

  if (TREE_TYPE (decl) != error_mark_node)
    as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

  /* We need the constructor to figure out reloc flag.  */
  if (vnode)
    vnode->get_constructor ();

  /* Common symbols, unless "retain" forces them into a real section.  */
  if (DECL_COMMON (decl)
      && !lookup_attribute (attr_name: "retain" , DECL_ATTRIBUTES (decl)))
    {
      /* If the decl has been given an explicit section name, or it resides
	 in a non-generic address space, then it isn't common, and shouldn't
	 be handled as such.  */
      gcc_assert (DECL_SECTION_NAME (decl) == NULL
		  && ADDR_SPACE_GENERIC_P (as));
      if (DECL_THREAD_LOCAL_P (decl))
	return tls_comm_section;
      else if (TREE_PUBLIC (decl) && bss_initializer_p (decl))
	return comm_section;
    }

  reloc = compute_reloc_for_var (decl);

  resolve_unique_section (decl, reloc, flag_data_sections);
  if (IN_NAMED_SECTION (decl))
    {
      section *sect = get_named_section (decl, NULL, reloc);

      /* A non-zero initializer in a user-named BSS-style section is an
	 error; degrade the initializer so emission can continue.  */
      if ((sect->common.flags & SECTION_BSS)
	  && !bss_initializer_p (decl, named: true))
	{
	  error_at (DECL_SOURCE_LOCATION (decl),
		    "only zero initializers are allowed in section %qs" ,
		    sect->named.name);
	  DECL_INITIAL (decl) = error_mark_node;
	}
      return sect;
    }

  /* Uninitialized data can use the "noswitch" lcomm/bss mechanisms,
     subject to address space, TLS, noinit and sanitizer constraints.  */
  if (ADDR_SPACE_GENERIC_P (as)
      && !DECL_THREAD_LOCAL_P (decl)
      && !DECL_NOINIT_P (decl)
      && !(prefer_noswitch_p && targetm.have_switchable_bss_sections)
      && bss_initializer_p (decl))
    {
      if (!TREE_PUBLIC (decl)
	  && !((flag_sanitize & SANITIZE_ADDRESS)
	       && asan_protect_global (decl)))
	return lcomm_section;
      if (bss_noswitch_section)
	return bss_noswitch_section;
    }

  /* Otherwise let the target pick the section.  */
  return targetm.asm_out.select_section (decl, reloc,
					 get_variable_align (decl));
}
1293 | |
1294 | /* Return the block into which object_block DECL should be placed. */ |
1295 | |
1296 | static struct object_block * |
1297 | get_block_for_decl (tree decl) |
1298 | { |
1299 | section *sect; |
1300 | |
1301 | if (VAR_P (decl)) |
1302 | { |
1303 | /* The object must be defined in this translation unit. */ |
1304 | if (DECL_EXTERNAL (decl)) |
1305 | return NULL; |
1306 | |
1307 | /* There's no point using object blocks for something that is |
1308 | isolated by definition. */ |
1309 | if (DECL_COMDAT_GROUP (decl)) |
1310 | return NULL; |
1311 | } |
1312 | |
1313 | /* We can only calculate block offsets if the decl has a known |
1314 | constant size. */ |
1315 | if (DECL_SIZE_UNIT (decl) == NULL) |
1316 | return NULL; |
1317 | if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (decl))) |
1318 | return NULL; |
1319 | |
1320 | /* Find out which section should contain DECL. We cannot put it into |
1321 | an object block if it requires a standalone definition. */ |
1322 | if (VAR_P (decl)) |
1323 | align_variable (decl, dont_output_data: 0); |
1324 | sect = get_variable_section (decl, prefer_noswitch_p: true); |
1325 | if (SECTION_STYLE (sect) == SECTION_NOSWITCH) |
1326 | return NULL; |
1327 | |
1328 | if (bool (lookup_attribute (attr_name: "retain" , DECL_ATTRIBUTES (decl))) |
1329 | != bool (sect->common.flags & SECTION_RETAIN)) |
1330 | return NULL; |
1331 | |
1332 | return get_block_for_section (sect); |
1333 | } |
1334 | |
1335 | /* Make sure block symbol SYMBOL is in block BLOCK. */ |
1336 | |
1337 | static void |
1338 | change_symbol_block (rtx symbol, struct object_block *block) |
1339 | { |
1340 | if (block != SYMBOL_REF_BLOCK (symbol)) |
1341 | { |
1342 | gcc_assert (SYMBOL_REF_BLOCK_OFFSET (symbol) < 0); |
1343 | SYMBOL_REF_BLOCK (symbol) = block; |
1344 | } |
1345 | } |
1346 | |
1347 | /* Return true if it is possible to put DECL in an object_block. */ |
1348 | |
1349 | static bool |
1350 | use_blocks_for_decl_p (tree decl) |
1351 | { |
1352 | struct symtab_node *snode; |
1353 | |
1354 | /* Don't create object blocks if each DECL is placed into a separate |
1355 | section because that will uselessly create a section anchor for |
1356 | each DECL. */ |
1357 | if (flag_data_sections) |
1358 | return false; |
1359 | |
1360 | /* Only data DECLs can be placed into object blocks. */ |
1361 | if (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL) |
1362 | return false; |
1363 | |
1364 | /* DECL_INITIAL (decl) set to decl is a hack used for some decls that |
1365 | are never used from code directly and we never want object block handling |
1366 | for those. */ |
1367 | if (DECL_INITIAL (decl) == decl) |
1368 | return false; |
1369 | |
1370 | /* If this decl is an alias, then we don't want to emit a |
1371 | definition. */ |
1372 | if (VAR_P (decl) |
1373 | && (snode = symtab_node::get (decl)) != NULL |
1374 | && snode->alias) |
1375 | return false; |
1376 | |
1377 | return targetm.use_blocks_for_decl_p (decl); |
1378 | } |
1379 | |
1380 | /* Follow the IDENTIFIER_TRANSPARENT_ALIAS chain starting at *ALIAS |
1381 | until we find an identifier that is not itself a transparent alias. |
1382 | Modify the alias passed to it by reference (and all aliases on the |
1383 | way to the ultimate target), such that they do not have to be |
1384 | followed again, and return the ultimate target of the alias |
1385 | chain. */ |
1386 | |
1387 | static inline tree |
1388 | ultimate_transparent_alias_target (tree *alias) |
1389 | { |
1390 | tree target = *alias; |
1391 | |
1392 | if (IDENTIFIER_TRANSPARENT_ALIAS (target)) |
1393 | { |
1394 | gcc_assert (TREE_CHAIN (target)); |
1395 | target = ultimate_transparent_alias_target (alias: &TREE_CHAIN (target)); |
1396 | gcc_assert (! IDENTIFIER_TRANSPARENT_ALIAS (target) |
1397 | && ! TREE_CHAIN (target)); |
1398 | *alias = target; |
1399 | } |
1400 | |
1401 | return target; |
1402 | } |
1403 | |
1404 | /* Return true if REGNUM is mentioned in ELIMINABLE_REGS as a from |
1405 | register number. */ |
1406 | |
1407 | static bool |
1408 | eliminable_regno_p (int regnum) |
1409 | { |
1410 | static const struct |
1411 | { |
1412 | const int from; |
1413 | const int to; |
1414 | } eliminables[] = ELIMINABLE_REGS; |
1415 | for (size_t i = 0; i < ARRAY_SIZE (eliminables); i++) |
1416 | if (regnum == eliminables[i].from) |
1417 | return true; |
1418 | return false; |
1419 | } |
1420 | |
1421 | /* Create the DECL_RTL for a VAR_DECL or FUNCTION_DECL. DECL should |
1422 | have static storage duration. In other words, it should not be an |
1423 | automatic variable, including PARM_DECLs. |
1424 | |
1425 | There is, however, one exception: this function handles variables |
1426 | explicitly placed in a particular register by the user. |
1427 | |
1428 | This is never called for PARM_DECL nodes. */ |
1429 | |
void
make_decl_rtl (tree decl)
{
  const char *name = 0;
  int reg_number;
  tree id;
  rtx x;

  /* Check that we are not being given an automatic variable.  */
  gcc_assert (TREE_CODE (decl) != PARM_DECL
	      && TREE_CODE (decl) != RESULT_DECL);

  /* A weak alias has TREE_PUBLIC set but not the other bits.  */
  gcc_assert (!VAR_P (decl)
	      || TREE_STATIC (decl)
	      || TREE_PUBLIC (decl)
	      || DECL_EXTERNAL (decl)
	      || DECL_REGISTER (decl));

  /* And that we were not given a type or a label.  */
  gcc_assert (TREE_CODE (decl) != TYPE_DECL
	      && TREE_CODE (decl) != LABEL_DECL);

  /* For a duplicate declaration, we can be called twice on the
     same DECL node.  Don't discard the RTL already made.  */
  if (DECL_RTL_SET_P (decl))
    {
      /* If the old RTL had the wrong mode, fix the mode.  */
      x = DECL_RTL (decl);
      if (GET_MODE (x) != DECL_MODE (decl))
	SET_DECL_RTL (decl, adjust_address_nv (x, DECL_MODE (decl), 0));

      if (TREE_CODE (decl) != FUNCTION_DECL && DECL_REGISTER (decl))
	return;

      /* ??? Another way to do this would be to maintain a hashed
	 table of such critters.  Instead of adding stuff to a DECL
	 to give certain attributes to it, we could use an external
	 hash map from DECL to set of attributes.  */

      /* Let the target reassign the RTL if it wants.
	 This is necessary, for example, when one machine specific
	 decl attribute overrides another.  */
      targetm.encode_section_info (decl, DECL_RTL (decl), false);

      /* If the symbol has a SYMBOL_REF_BLOCK field, update it based
	 on the new decl information.  */
      if (MEM_P (x)
	  && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && SYMBOL_REF_HAS_BLOCK_INFO_P (XEXP (x, 0)))
	change_symbol_block (XEXP (x, 0), block: get_block_for_decl (decl));

      return;
    }

  /* If this variable belongs to the global constant pool, retrieve the
     pre-computed RTL or recompute it in LTO mode.  */
  if (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
    {
      SET_DECL_RTL (decl, output_constant_def (DECL_INITIAL (decl), 1));
      return;
    }

  id = DECL_ASSEMBLER_NAME (decl);
  name = IDENTIFIER_POINTER (id);

  /* A DECL_REGISTER variable whose name does not start with '*' never
     got an asm spec: diagnose, then fall through to the recovery code
     after the register branch below.  */
  if (name[0] != '*' && TREE_CODE (decl) != FUNCTION_DECL
      && DECL_REGISTER (decl))
    {
      error ("register name not specified for %q+D" , decl);
    }
  else if (TREE_CODE (decl) != FUNCTION_DECL && DECL_REGISTER (decl))
    {
      /* NAME is "*<asmspec>"; skip the star to get the register name.  */
      const char *asmspec = name+1;
      machine_mode mode = DECL_MODE (decl);
      reg_number = decode_reg_name (name: asmspec);
      /* First detect errors in declaring global registers.  */
      if (reg_number == -1)
	error ("register name not specified for %q+D" , decl);
      else if (reg_number < 0)
	error ("invalid register name for %q+D" , decl);
      else if (mode == BLKmode)
	error ("data type of %q+D isn%'t suitable for a register" ,
	       decl);
      else if (!in_hard_reg_set_p (accessible_reg_set, mode, regno: reg_number))
	error ("the register specified for %q+D cannot be accessed"
	       " by the current target" , decl);
      else if (!in_hard_reg_set_p (operand_reg_set, mode, regno: reg_number))
	error ("the register specified for %q+D is not general enough"
	       " to be used as a register variable" , decl);
      else if (!targetm.hard_regno_mode_ok (reg_number, mode))
	error ("register specified for %q+D isn%'t suitable for data type" ,
	       decl);
      else if (reg_number != HARD_FRAME_POINTER_REGNUM
	       && (reg_number == FRAME_POINTER_REGNUM
#ifdef RETURN_ADDRESS_POINTER_REGNUM
		   || reg_number == RETURN_ADDRESS_POINTER_REGNUM
#endif
		   || reg_number == ARG_POINTER_REGNUM)
	       && eliminable_regno_p (regnum: reg_number))
	error ("register specified for %q+D is an internal GCC "
	       "implementation detail" , decl);
      /* Now handle properly declared static register variables.  */
      else
	{
	  int nregs;

	  if (DECL_INITIAL (decl) != 0 && TREE_STATIC (decl))
	    {
	      DECL_INITIAL (decl) = 0;
	      error ("global register variable has initial value" );
	    }
	  if (TREE_THIS_VOLATILE (decl))
	    warning (OPT_Wvolatile_register_var,
		     "optimization may eliminate reads and/or "
		     "writes to register variables" );

	  /* If the user specified one of the eliminables registers here,
	     e.g., FRAME_POINTER_REGNUM, we don't want to get this variable
	     confused with that register and be eliminated.  This usage is
	     somewhat suspect...  */

	  SET_DECL_RTL (decl, gen_raw_REG (mode, reg_number));
	  ORIGINAL_REGNO (DECL_RTL (decl)) = reg_number;
	  REG_USERVAR_P (DECL_RTL (decl)) = 1;

	  if (TREE_STATIC (decl))
	    {
	      /* Make this register global, so not usable for anything
		 else.  */
#ifdef ASM_DECLARE_REGISTER_GLOBAL
	      name = IDENTIFIER_POINTER (DECL_NAME (decl));
	      ASM_DECLARE_REGISTER_GLOBAL (asm_out_file, decl, reg_number, name);
#endif
	      nregs = hard_regno_nregs (regno: reg_number, mode);
	      while (nregs > 0)
		globalize_reg (decl, reg_number + --nregs);
	    }

	  /* As a register variable, it has no section.  */
	  return;
	}
      /* Avoid internal errors from invalid register
	 specifications.  */
      SET_DECL_ASSEMBLER_NAME (decl, NULL_TREE);
      DECL_HARD_REGISTER (decl) = 0;
      /* Also avoid SSA inconsistencies by pretending this is an external
	 decl now.  */
      DECL_EXTERNAL (decl) = 1;
      return;
    }
  /* Now handle ordinary static variables and functions (in memory).
     Also handle vars declared register invalidly.  */
  else if (name[0] == '*')
    {
#ifdef REGISTER_PREFIX
      if (strlen (REGISTER_PREFIX) != 0)
	{
	  reg_number = decode_reg_name (name);
	  if (reg_number >= 0 || reg_number == -3)
	    error ("register name given for non-register variable %q+D" , decl);
	}
#endif
    }

  /* Specifying a section attribute on a variable forces it into a
     non-.bss section, and thus it cannot be common.  */
  /* FIXME: In general this code should not be necessary because
     visibility pass is doing the same work.  But notice_global_symbol
     is called early and it needs to make DECL_RTL to get the name.
     we take care of recomputing the DECL_RTL after visibility is changed.  */
  if (VAR_P (decl)
      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
      && DECL_SECTION_NAME (decl) != NULL
      && DECL_INITIAL (decl) == NULL_TREE
      && DECL_COMMON (decl))
    DECL_COMMON (decl) = 0;

  /* Variables can't be both common and weak.  */
  if (VAR_P (decl) && DECL_WEAK (decl))
    DECL_COMMON (decl) = 0;

  /* Create the SYMBOL_REF, either as a block symbol (for section
     anchors) or as a plain symbol in the decl's address space.  */
  if (use_object_blocks_p () && use_blocks_for_decl_p (decl))
    x = create_block_symbol (label: name, block: get_block_for_decl (decl), offset: -1);
  else
    {
      machine_mode address_mode = Pmode;
      if (TREE_TYPE (decl) != error_mark_node)
	{
	  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
	  address_mode = targetm.addr_space.address_mode (as);
	}
      x = gen_rtx_SYMBOL_REF (address_mode, name);
    }
  SYMBOL_REF_WEAK (x) = DECL_WEAK (decl);
  SET_SYMBOL_REF_DECL (x, decl);

  /* The DECL_RTL of a static object is a MEM around its symbol.  */
  x = gen_rtx_MEM (DECL_MODE (decl), x);
  if (TREE_CODE (decl) != FUNCTION_DECL)
    set_mem_attributes (x, decl, 1);
  SET_DECL_RTL (decl, x);

  /* Optionally set flags or add text to the name to record information
     such as that it is a function name.
     If the name is changed, the macro ASM_OUTPUT_LABELREF
     will have to know how to strip this information.  */
  targetm.encode_section_info (decl, DECL_RTL (decl), true);
}
1638 | |
1639 | /* Like make_decl_rtl, but inhibit creation of new alias sets when |
1640 | calling make_decl_rtl. Also, reset DECL_RTL before returning the |
1641 | rtl. */ |
1642 | |
rtx
make_decl_rtl_for_debug (tree decl)
{
  unsigned int save_aliasing_flag;
  rtx rtl;

  if (DECL_RTL_SET_P (decl))
    return DECL_RTL (decl);

  /* Kludge alert!  Somewhere down the call chain, make_decl_rtl will
     call new_alias_set.  If running with -fcompare-debug, sometimes
     we do not want to create alias sets that will throw the alias
     numbers off in the comparison dumps.  So... clearing
     flag_strict_aliasing will keep new_alias_set() from creating a
     new set.  */
  save_aliasing_flag = flag_strict_aliasing;
  flag_strict_aliasing = 0;

  /* NOTE(review): reading DECL_RTL here presumably triggers the lazy
     make_decl_rtl computation referred to in the function comment —
     confirm against the DECL_RTL macro definition.  */
  rtl = DECL_RTL (decl);
  /* Reset DECL_RTL back, as various parts of the compiler expects
     DECL_RTL set meaning it is actually going to be output.  */
  SET_DECL_RTL (decl, NULL);

  flag_strict_aliasing = save_aliasing_flag;
  return rtl;
}
1669 | |
1670 | /* Output a string of literal assembler code |
1671 | for an `asm' keyword used between functions. */ |
1672 | |
1673 | void |
1674 | assemble_asm (tree string) |
1675 | { |
1676 | const char *p; |
1677 | app_enable (); |
1678 | |
1679 | if (TREE_CODE (string) == ADDR_EXPR) |
1680 | string = TREE_OPERAND (string, 0); |
1681 | |
1682 | p = TREE_STRING_POINTER (string); |
1683 | fprintf (stream: asm_out_file, format: "%s%s\n" , p[0] == '\t' ? "" : "\t" , p); |
1684 | } |
1685 | |
1686 | /* Write the address of the entity given by SYMBOL to SEC. */ |
void
assemble_addr_to_section (rtx symbol, section *sec)
{
  /* Emit a pointer-aligned, pointer-sized reference to SYMBOL in SEC.  */
  switch_to_section (sec);
  assemble_align (POINTER_SIZE);
  assemble_integer (symbol, POINTER_SIZE_UNITS, POINTER_SIZE, 1);
}
1694 | |
1695 | /* Return the numbered .ctors.N (if CONSTRUCTOR_P) or .dtors.N (if |
1696 | not) section for PRIORITY. */ |
1697 | section * |
1698 | get_cdtor_priority_section (int priority, bool constructor_p) |
1699 | { |
1700 | /* Buffer conservatively large enough for the full range of a 32-bit |
1701 | int plus the text below. */ |
1702 | char buf[18]; |
1703 | |
1704 | /* ??? This only works reliably with the GNU linker. */ |
1705 | sprintf (s: buf, format: "%s.%.5u" , |
1706 | constructor_p ? ".ctors" : ".dtors" , |
1707 | /* Invert the numbering so the linker puts us in the proper |
1708 | order; constructors are run from right to left, and the |
1709 | linker sorts in increasing order. */ |
1710 | MAX_INIT_PRIORITY - priority); |
1711 | return get_section (name: buf, flags: SECTION_WRITE, NULL); |
1712 | } |
1713 | |
1714 | void |
1715 | default_named_section_asm_out_destructor (rtx symbol, int priority) |
1716 | { |
1717 | section *sec; |
1718 | |
1719 | if (priority != DEFAULT_INIT_PRIORITY) |
1720 | sec = get_cdtor_priority_section (priority, |
1721 | /*constructor_p=*/false); |
1722 | else |
1723 | sec = get_section (name: ".dtors" , flags: SECTION_WRITE, NULL); |
1724 | |
1725 | assemble_addr_to_section (symbol, sec); |
1726 | } |
1727 | |
1728 | #ifdef DTORS_SECTION_ASM_OP |
void
default_dtor_section_asm_out_destructor (rtx symbol,
					 int priority ATTRIBUTE_UNUSED)
{
  /* Targets with a dedicated DTORS_SECTION_ASM_OP ignore the priority
     and emit every destructor address into the single dtors section.  */
  assemble_addr_to_section (symbol, dtors_section);
}
1735 | #endif |
1736 | |
1737 | void |
1738 | default_named_section_asm_out_constructor (rtx symbol, int priority) |
1739 | { |
1740 | section *sec; |
1741 | |
1742 | if (priority != DEFAULT_INIT_PRIORITY) |
1743 | sec = get_cdtor_priority_section (priority, |
1744 | /*constructor_p=*/true); |
1745 | else |
1746 | sec = get_section (name: ".ctors" , flags: SECTION_WRITE, NULL); |
1747 | |
1748 | assemble_addr_to_section (symbol, sec); |
1749 | } |
1750 | |
#ifdef CTORS_SECTION_ASM_OP
/* Emit SYMBOL's address into the target's single ctors_section.
   PRIORITY is unused here.  */
void
default_ctor_section_asm_out_constructor (rtx symbol,
					  int priority ATTRIBUTE_UNUSED)
{
  assemble_addr_to_section (symbol, ctors_section);
}
#endif
1759 | |
1760 | /* CONSTANT_POOL_BEFORE_FUNCTION may be defined as an expression with |
1761 | a nonzero value if the constant pool should be output before the |
1762 | start of the function, or a zero value if the pool should output |
1763 | after the end of the function. The default is to put it before the |
1764 | start. */ |
1765 | |
1766 | #ifndef CONSTANT_POOL_BEFORE_FUNCTION |
1767 | #define CONSTANT_POOL_BEFORE_FUNCTION 1 |
1768 | #endif |
1769 | |
/* DECL is an object (either VAR_DECL or FUNCTION_DECL) which is going
   to be output to assembler.
   Set first_global_object_name and weak_global_object_name as appropriate.  */

void
notice_global_symbol (tree decl)
{
  const char **t = &first_global_object_name;

  /* Bail out once a name has already been recorded, and for anything
     that cannot supply one: non-public, external or nameless decls,
     hard-register variables, and non-function decls that are either
     not variables or are commons without a real initializer.  */
  if (first_global_object_name
      || !TREE_PUBLIC (decl)
      || DECL_EXTERNAL (decl)
      || !DECL_NAME (decl)
      || (VAR_P (decl) && DECL_HARD_REGISTER (decl))
      || (TREE_CODE (decl) != FUNCTION_DECL
	  && (!VAR_P (decl)
	      || (DECL_COMMON (decl)
		  && (DECL_INITIAL (decl) == 0
		      || DECL_INITIAL (decl) == error_mark_node)))))
    return;

  /* We win when global object is found, but it is useful to know about weak
     symbol as well so we can produce nicer unique names.  */
  if (DECL_WEAK (decl) || DECL_ONE_ONLY (decl) || flag_shlib)
    t = &weak_global_object_name;

  if (!*t)
    {
      /* Record the stripped assembler name of the (alias target of the)
	 decl, in GC-managed storage.  */
      tree id = DECL_ASSEMBLER_NAME (decl);
      ultimate_transparent_alias_target (alias: &id);
      *t = ggc_strdup (targetm.strip_name_encoding (IDENTIFIER_POINTER (id)));
    }
}
1803 | |
1804 | /* If not using flag_reorder_blocks_and_partition, decide early whether the |
1805 | current function goes into the cold section, so that targets can use |
1806 | current_function_section during RTL expansion. DECL describes the |
1807 | function. */ |
1808 | |
1809 | void |
1810 | decide_function_section (tree decl) |
1811 | { |
1812 | first_function_block_is_cold = false; |
1813 | |
1814 | if (DECL_SECTION_NAME (decl)) |
1815 | { |
1816 | struct cgraph_node *node = cgraph_node::get (decl: current_function_decl); |
1817 | /* Calls to function_section rely on first_function_block_is_cold |
1818 | being accurate. */ |
1819 | first_function_block_is_cold = (node |
1820 | && node->frequency |
1821 | == NODE_FREQUENCY_UNLIKELY_EXECUTED); |
1822 | } |
1823 | |
1824 | in_cold_section_p = first_function_block_is_cold; |
1825 | } |
1826 | |
1827 | /* Get the function's name, as described by its RTL. This may be |
1828 | different from the DECL_NAME name used in the source file. */ |
1829 | const char * |
1830 | get_fnname_from_decl (tree decl) |
1831 | { |
1832 | rtx x = DECL_RTL (decl); |
1833 | gcc_assert (MEM_P (x)); |
1834 | x = XEXP (x, 0); |
1835 | gcc_assert (GET_CODE (x) == SYMBOL_REF); |
1836 | return XSTR (x, 0); |
1837 | } |
1838 | |
/* Output function label, possibly with accompanying metadata.  No additional
   code or data is output after the label.  FILE is the assembly output
   stream; NAME is the function's assembler name.  */

void
assemble_function_label_raw (FILE *file, const char *name)
{
  ASM_OUTPUT_LABEL (file, name);
  assemble_function_label_final ();
}
1848 | |
1849 | /* Finish outputting function label. Needs to be called when outputting |
1850 | function label without using assemble_function_label_raw (). */ |
1851 | |
1852 | void |
1853 | assemble_function_label_final (void) |
1854 | { |
1855 | if ((flag_sanitize & SANITIZE_ADDRESS) |
1856 | /* Notify ASAN only about the first function label. */ |
1857 | && (in_cold_section_p == first_function_block_is_cold) |
1858 | /* Do not notify ASAN when called from, e.g., code_end (). */ |
1859 | && cfun) |
1860 | asan_function_start (); |
1861 | } |
1862 | |
/* Output assembler code for the constant pool of a function and associated
   with defining the name of the function.  DECL describes the function.
   NAME is the function's name.  For the constant pool, we use the current
   constant pool data.  */

void
assemble_start_function (tree decl, const char *fnname)
{
  int align;
  char tmp_label[100];
  bool hot_label_written = false;

  /* When the function body is split between a hot and a cold section,
     generate the internal labels delimiting both halves.  */
  if (crtl->has_bb_partition)
    {
      ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LHOTB" , const_labelno);
      crtl->subsections.hot_section_label = ggc_strdup (tmp_label);
      ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LCOLDB" , const_labelno);
      crtl->subsections.cold_section_label = ggc_strdup (tmp_label);
      ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LHOTE" , const_labelno);
      crtl->subsections.hot_section_end_label = ggc_strdup (tmp_label);
      ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LCOLDE" , const_labelno);
      crtl->subsections.cold_section_end_label = ggc_strdup (tmp_label);
      const_labelno++;
      cold_function_name = NULL_TREE;
    }
  else
    {
      crtl->subsections.hot_section_label = NULL;
      crtl->subsections.cold_section_label = NULL;
      crtl->subsections.hot_section_end_label = NULL;
      crtl->subsections.cold_section_end_label = NULL;
    }

  /* The following code does not need preprocessing in the assembler.  */

  app_disable ();

  if (CONSTANT_POOL_BEFORE_FUNCTION)
    output_constant_pool (fnname, decl);

  align = symtab_node::get (decl)->definition_alignment ();

  /* Make sure the not and cold text (code) sections are properly
     aligned.  This is necessary here in the case where the function
     has both hot and cold sections, because we don't want to re-set
     the alignment when the section switch happens mid-function.  */

  if (crtl->has_bb_partition)
    {
      first_function_block_is_cold = false;

      switch_to_section (unlikely_text_section ());
      assemble_align (align);
      ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.cold_section_label);

      /* When the function starts with a cold section, we need to explicitly
	 align the hot section and write out the hot section label.
	 But if the current function is a thunk, we do not have a CFG.  */
      if (!cfun->is_thunk
	  && BB_PARTITION (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) == BB_COLD_PARTITION)
	{
	  switch_to_section (text_section);
	  assemble_align (align);
	  ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_label);
	  hot_label_written = true;
	  first_function_block_is_cold = true;
	}
      in_cold_section_p = first_function_block_is_cold;
    }


  /* Switch to the correct text section for the start of the function.  */

  switch_to_section (function_section (decl), decl);
  if (crtl->has_bb_partition && !hot_label_written)
    ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_label);

  /* Tell assembler to move to target machine's alignment for functions.  */
  align = floor_log2 (x: align / BITS_PER_UNIT);
  /* Handle forced alignment.  This really ought to apply to all functions,
     since it is used by patchable entries.  */
  if (flag_min_function_alignment)
    align = MAX (align, floor_log2 (flag_min_function_alignment));

  if (align > 0)
    {
      ASM_OUTPUT_ALIGN (asm_out_file, align);
    }

  /* Handle a user-specified function alignment.
     Note that we still need to align to DECL_ALIGN, as above,
     because ASM_OUTPUT_MAX_SKIP_ALIGN might not do any alignment at all.  */
  if (! DECL_USER_ALIGN (decl)
      && align_functions.levels[0].log > align
      && optimize_function_for_speed_p (cfun))
    {
#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
      int align_log = align_functions.levels[0].log;
#endif
      int max_skip = align_functions.levels[0].maxskip;
      /* Cap the skip so a small function is not pushed past its own
	 length purely for alignment.  */
      if (flag_limit_function_alignment && crtl->max_insn_address > 0
	  && max_skip >= crtl->max_insn_address)
	max_skip = crtl->max_insn_address - 1;

#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
      ASM_OUTPUT_MAX_SKIP_ALIGN (asm_out_file, align_log, max_skip);
      if (max_skip == align_functions.levels[0].maxskip)
	ASM_OUTPUT_MAX_SKIP_ALIGN (asm_out_file,
				   align_functions.levels[1].log,
				   align_functions.levels[1].maxskip);
#else
      ASM_OUTPUT_ALIGN (asm_out_file, align_functions.levels[0].log);
#endif
    }

#ifdef ASM_OUTPUT_FUNCTION_PREFIX
  ASM_OUTPUT_FUNCTION_PREFIX (asm_out_file, fnname);
#endif

  if (!DECL_IGNORED_P (decl))
    (*debug_hooks->begin_function) (decl);

  /* Make function name accessible from other files, if appropriate.  */

  if (TREE_PUBLIC (decl))
    {
      notice_global_symbol (decl);

      globalize_decl (decl);

      maybe_assemble_visibility (decl);
    }

  if (DECL_PRESERVE_P (decl))
    targetm.asm_out.mark_decl_preserved (fnname);

  unsigned short patch_area_size = crtl->patch_area_size;
  unsigned short patch_area_entry = crtl->patch_area_entry;

  /* Emit the patching area before the entry label, if any.  */
  if (patch_area_entry > 0)
    targetm.asm_out.print_patchable_function_entry (asm_out_file,
						    patch_area_entry, true);

  /* Do any machine/system dependent processing of the function name.  */
#ifdef ASM_DECLARE_FUNCTION_NAME
  ASM_DECLARE_FUNCTION_NAME (asm_out_file, fnname, current_function_decl);
#else
  /* Standard thing is just output label for the function.  */
  ASM_OUTPUT_FUNCTION_LABEL (asm_out_file, fnname, current_function_decl);
#endif /* ASM_DECLARE_FUNCTION_NAME */

  /* And the area after the label.  Record it if we haven't done so yet.  */
  if (patch_area_size > patch_area_entry)
    targetm.asm_out.print_patchable_function_entry (asm_out_file,
						    patch_area_size
						    - patch_area_entry,
						    patch_area_entry == 0);

  if (lookup_attribute (attr_name: "no_split_stack" , DECL_ATTRIBUTES (decl)))
    saw_no_split_stack = true;
}
2025 | |
/* Output assembler code associated with defining the size of the
   function.  DECL describes the function.  NAME is the function's name.  */

void
assemble_end_function (tree decl, const char *fnname ATTRIBUTE_UNUSED)
{
#ifdef ASM_DECLARE_FUNCTION_SIZE
  /* We could have switched section in the middle of the function.  */
  if (crtl->has_bb_partition)
    switch_to_section (function_section (decl));
  ASM_DECLARE_FUNCTION_SIZE (asm_out_file, fnname, decl);
#endif
  if (! CONSTANT_POOL_BEFORE_FUNCTION)
    {
      output_constant_pool (fnname, decl);
      switch_to_section (function_section (decl)); /* need to switch back */
    }
  /* Output labels for end of hot/cold text sections (to be used by
     debug info.)  */
  if (crtl->has_bb_partition)
    {
      section *save_text_section;

      save_text_section = in_section;
      switch_to_section (unlikely_text_section ());
#ifdef ASM_DECLARE_COLD_FUNCTION_SIZE
      /* NOTE(review): cold_function_name is presumably set elsewhere when
	 the cold part's label is emitted; a size is declared only then.  */
      if (cold_function_name != NULL_TREE)
	ASM_DECLARE_COLD_FUNCTION_SIZE (asm_out_file,
					IDENTIFIER_POINTER (cold_function_name),
					decl);
#endif
      ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.cold_section_end_label);
      /* The hot end label belongs in whichever section held the hot part.  */
      if (first_function_block_is_cold)
	switch_to_section (text_section);
      else
	switch_to_section (function_section (decl));
      ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_end_label);
      switch_to_section (save_text_section);
    }
}
2066 | |
/* Assemble code to leave SIZE bytes of zeros.  */

void
assemble_zeros (unsigned HOST_WIDE_INT size)
{
  /* Do no output if -fsyntax-only.  */
  if (flag_syntax_only)
    return;

#ifdef ASM_NO_SKIP_IN_TEXT
  /* The `space' pseudo in the text section outputs nop insns rather than 0s,
     so we must output 0s explicitly in the text section.  */
  if (ASM_NO_SKIP_IN_TEXT && (in_section->common.flags & SECTION_CODE) != 0)
    {
      unsigned HOST_WIDE_INT i;
      for (i = 0; i < size; i++)
	assemble_integer (const0_rtx, 1, BITS_PER_UNIT, 1);
    }
  else
#endif
    /* Emit nothing at all when SIZE is zero.  */
    if (size > 0)
      ASM_OUTPUT_SKIP (asm_out_file, size);
}
2090 | |
2091 | /* Assemble an alignment pseudo op for an ALIGN-bit boundary. */ |
2092 | |
2093 | void |
2094 | assemble_align (unsigned int align) |
2095 | { |
2096 | if (align > BITS_PER_UNIT) |
2097 | { |
2098 | ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT)); |
2099 | } |
2100 | } |
2101 | |
2102 | /* Assemble a string constant with the specified C string as contents. */ |
2103 | |
2104 | void |
2105 | assemble_string (const char *p, int size) |
2106 | { |
2107 | int pos = 0; |
2108 | int maximum = 2000; |
2109 | |
2110 | /* If the string is very long, split it up. */ |
2111 | |
2112 | while (pos < size) |
2113 | { |
2114 | int thissize = size - pos; |
2115 | if (thissize > maximum) |
2116 | thissize = maximum; |
2117 | |
2118 | ASM_OUTPUT_ASCII (asm_out_file, p, thissize); |
2119 | |
2120 | pos += thissize; |
2121 | p += thissize; |
2122 | } |
2123 | } |
2124 | |
2125 | |
/* A noswitch_section_callback for lcomm_section.  Emit a local-common
   definition of DECL named NAME.  SIZE is its size in bytes; ROUNDED is
   that size rounded up to BIGGEST_ALIGNMENT, used only by the plain
   ASM_OUTPUT_LOCAL fallback.  Return true iff the macro used honors
   the requested alignment.  */

static bool
emit_local (tree decl ATTRIBUTE_UNUSED,
	    const char *name ATTRIBUTE_UNUSED,
	    unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
	    unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
{
#if defined ASM_OUTPUT_ALIGNED_DECL_LOCAL
  unsigned int align = symtab_node::get (decl)->definition_alignment ();
  ASM_OUTPUT_ALIGNED_DECL_LOCAL (asm_out_file, decl, name,
				 size, align);
  return true;
#elif defined ASM_OUTPUT_ALIGNED_LOCAL
  unsigned int align = symtab_node::get (decl)->definition_alignment ();
  ASM_OUTPUT_ALIGNED_LOCAL (asm_out_file, name, size, align);
  return true;
#else
  ASM_OUTPUT_LOCAL (asm_out_file, name, size, rounded);
  return false;
#endif
}
2148 | |
/* A noswitch_section_callback for bss_noswitch_section.  Emit DECL,
   named NAME and SIZE bytes long, into BSS.  ROUNDED is unused here.
   Returns true: the aligned macro honors the alignment request.  */

#if defined ASM_OUTPUT_ALIGNED_BSS
static bool
emit_bss (tree decl ATTRIBUTE_UNUSED,
	  const char *name ATTRIBUTE_UNUSED,
	  unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
	  unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
{
  ASM_OUTPUT_ALIGNED_BSS (asm_out_file, decl, name, size,
			  get_variable_align (decl));
  return true;
}
#endif
2163 | |
/* A noswitch_section_callback for comm_section.  Emit a common
   definition of DECL named NAME and SIZE bytes long.  ROUNDED is used
   only by the plain ASM_OUTPUT_COMMON fallback.  Return true iff the
   macro used honors the requested alignment.  */

static bool
emit_common (tree decl ATTRIBUTE_UNUSED,
	     const char *name ATTRIBUTE_UNUSED,
	     unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
	     unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
{
#if defined ASM_OUTPUT_ALIGNED_DECL_COMMON
  ASM_OUTPUT_ALIGNED_DECL_COMMON (asm_out_file, decl, name,
				  size, get_variable_align (decl));
  return true;
#elif defined ASM_OUTPUT_ALIGNED_COMMON
  ASM_OUTPUT_ALIGNED_COMMON (asm_out_file, name, size,
			     get_variable_align (decl));
  return true;
#else
  ASM_OUTPUT_COMMON (asm_out_file, name, size, rounded);
  return false;
#endif
}
2185 | |
/* A noswitch_section_callback for tls_comm_section.  Emit a thread-local
   common definition of DECL named NAME and SIZE bytes long; diagnose
   targets with no such support.  ROUNDED is unused.  */

static bool
emit_tls_common (tree decl ATTRIBUTE_UNUSED,
		 const char *name ATTRIBUTE_UNUSED,
		 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
		 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
{
#ifdef ASM_OUTPUT_TLS_COMMON
  ASM_OUTPUT_TLS_COMMON (asm_out_file, decl, name, size);
  return true;
#else
  sorry ("thread-local COMMON data not implemented" );
  return true;
#endif
}
2202 | |
2203 | /* Assemble DECL given that it belongs in SECTION_NOSWITCH section SECT. |
2204 | NAME is the name of DECL's SYMBOL_REF. */ |
2205 | |
2206 | static void |
2207 | assemble_noswitch_variable (tree decl, const char *name, section *sect, |
2208 | unsigned int align) |
2209 | { |
2210 | unsigned HOST_WIDE_INT size, rounded; |
2211 | |
2212 | size = tree_to_uhwi (DECL_SIZE_UNIT (decl)); |
2213 | rounded = size; |
2214 | |
2215 | if ((flag_sanitize & SANITIZE_ADDRESS) && asan_protect_global (decl)) |
2216 | size += asan_red_zone_size (size); |
2217 | |
2218 | /* Don't allocate zero bytes of common, |
2219 | since that means "undefined external" in the linker. */ |
2220 | if (size == 0) |
2221 | rounded = 1; |
2222 | |
2223 | /* Round size up to multiple of BIGGEST_ALIGNMENT bits |
2224 | so that each uninitialized object starts on such a boundary. */ |
2225 | rounded += (BIGGEST_ALIGNMENT / BITS_PER_UNIT) - 1; |
2226 | rounded = (rounded / (BIGGEST_ALIGNMENT / BITS_PER_UNIT) |
2227 | * (BIGGEST_ALIGNMENT / BITS_PER_UNIT)); |
2228 | |
2229 | if (!sect->noswitch.callback (decl, name, size, rounded) |
2230 | && (unsigned HOST_WIDE_INT) (align / BITS_PER_UNIT) > rounded) |
2231 | error ("requested alignment for %q+D is greater than " |
2232 | "implemented alignment of %wu" , decl, rounded); |
2233 | } |
2234 | |
/* A subroutine of assemble_variable.  Output the label and contents of
   DECL, whose address is a SYMBOL_REF with name NAME.  DONT_OUTPUT_DATA
   is as for assemble_variable.  MERGE_STRINGS is true when the chosen
   section is a mergeable string section (SECTION_MERGE|SECTION_STRINGS).  */

static void
assemble_variable_contents (tree decl, const char *name,
			    bool dont_output_data, bool merge_strings)
{
  /* Do any machine/system dependent processing of the object.  */
#ifdef ASM_DECLARE_OBJECT_NAME
  last_assemble_variable_decl = decl;
  ASM_DECLARE_OBJECT_NAME (asm_out_file, name, decl);
#else
  /* Standard thing is just output label for the object.  */
  ASM_OUTPUT_LABEL (asm_out_file, name);
#endif /* ASM_DECLARE_OBJECT_NAME */

  if (!dont_output_data)
    {
      /* Caller is supposed to use varpool_get_constructor when it wants
	 to output the body.  */
      gcc_assert (!in_lto_p || DECL_INITIAL (decl) != error_mark_node);
      if (DECL_INITIAL (decl)
	  && DECL_INITIAL (decl) != error_mark_node
	  && !initializer_zerop (DECL_INITIAL (decl)))
	/* Output the actual data.  */
	output_constant (DECL_INITIAL (decl),
			 tree_to_uhwi (DECL_SIZE_UNIT (decl)),
			 get_variable_align (decl),
			 false, merge_strings);
      else
	/* Leave space for it.  */
	assemble_zeros (size: tree_to_uhwi (DECL_SIZE_UNIT (decl)));
      targetm.asm_out.decl_end ();
    }
}
2271 | |
2272 | /* Write out assembly for the variable DECL, which is not defined in |
2273 | the current translation unit. */ |
2274 | void |
2275 | assemble_undefined_decl (tree decl) |
2276 | { |
2277 | const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0); |
2278 | targetm.asm_out.assemble_undefined_decl (asm_out_file, name, decl); |
2279 | } |
2280 | |
/* Assemble everything that is needed for a variable or function declaration.
   Not used for automatic variables, and not used for function definitions.
   Should not be called for variables of incomplete structure type.

   TOP_LEVEL is nonzero if this variable has file scope.
   AT_END is nonzero if this is the special handling, at end of compilation,
   to define things that have had only tentative definitions.
   DONT_OUTPUT_DATA if nonzero means don't actually output the
   initial value (that will be done by the caller).  */

void
assemble_variable (tree decl, int top_level ATTRIBUTE_UNUSED,
		   int at_end ATTRIBUTE_UNUSED, int dont_output_data)
{
  const char *name;
  rtx decl_rtl, symbol;
  section *sect;
  unsigned int align;
  bool asan_protected = false;

  /* This function is supposed to handle VARIABLES.  Ensure we have one.  */
  gcc_assert (VAR_P (decl));

  /* Emulated TLS had better not get this far.  */
  gcc_checking_assert (targetm.have_tls || !DECL_THREAD_LOCAL_P (decl));

  last_assemble_variable_decl = 0;

  /* Normally no need to say anything here for external references,
     since assemble_external is called by the language-specific code
     when a declaration is first seen.  */

  if (DECL_EXTERNAL (decl))
    return;

  /* Do nothing for global register variables.  */
  if (DECL_RTL_SET_P (decl) && REG_P (DECL_RTL (decl)))
    {
      TREE_ASM_WRITTEN (decl) = 1;
      return;
    }

  /* If type was incomplete when the variable was declared,
     see if it is complete now.  */

  if (DECL_SIZE (decl) == 0)
    layout_decl (decl, 0);

  /* Still incomplete => don't allocate it; treat the tentative defn
     (which is what it must have been) as an `extern' reference.  */

  if (!dont_output_data && DECL_SIZE (decl) == 0)
    {
      error ("storage size of %q+D isn%'t known" , decl);
      TREE_ASM_WRITTEN (decl) = 1;
      return;
    }

  /* The first declaration of a variable that comes through this function
     decides whether it is global (in C, has external linkage)
     or local (in C, has internal linkage).  So do nothing more
     if this function has already run.  */

  if (TREE_ASM_WRITTEN (decl))
    return;

  /* Make sure targetm.encode_section_info is invoked before we set
     ASM_WRITTEN.  */
  decl_rtl = DECL_RTL (decl);

  TREE_ASM_WRITTEN (decl) = 1;

  /* Do no output if -fsyntax-only.  */
  if (flag_syntax_only)
    return;

  if (! dont_output_data
      && ! valid_constant_size_p (DECL_SIZE_UNIT (decl)))
    {
      error ("size of variable %q+D is too large" , decl);
      return;
    }

  gcc_assert (MEM_P (decl_rtl));
  gcc_assert (GET_CODE (XEXP (decl_rtl, 0)) == SYMBOL_REF);
  symbol = XEXP (decl_rtl, 0);

  /* If this symbol belongs to the tree constant pool, output the constant
     if it hasn't already been written.  */
  if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    {
      tree decl = SYMBOL_REF_DECL (symbol);
      if (!TREE_ASM_WRITTEN (DECL_INITIAL (decl)))
	output_constant_def_contents (symbol);
      return;
    }

  app_disable ();

  name = XSTR (symbol, 0);
  if (TREE_PUBLIC (decl) && DECL_NAME (decl))
    notice_global_symbol (decl);

  /* Compute the alignment of this data.  */

  align_variable (decl, dont_output_data);

  /* Globals protected by AddressSanitizer get extra alignment now and a
     trailing red zone emitted below.  */
  if ((flag_sanitize & SANITIZE_ADDRESS)
      && asan_protect_global (decl))
    {
      asan_protected = true;
      SET_DECL_ALIGN (decl, MAX (DECL_ALIGN (decl),
				 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT));
    }

  set_mem_align (decl_rtl, DECL_ALIGN (decl));

  align = get_variable_align (decl);

  if (TREE_PUBLIC (decl))
    maybe_assemble_visibility (decl);

  if (DECL_PRESERVE_P (decl))
    targetm.asm_out.mark_decl_preserved (name);

  /* First make the assembler name(s) global if appropriate.  */
  sect = get_variable_section (decl, prefer_noswitch_p: false);
  if (TREE_PUBLIC (decl)
      && (sect->common.flags & SECTION_COMMON) == 0)
    globalize_decl (decl);

  /* Output any data that we will need to use the address of.  */
  if (DECL_INITIAL (decl) && DECL_INITIAL (decl) != error_mark_node)
    output_addressed_constants (DECL_INITIAL (decl), 0);

  /* dbxout.cc needs to know this.  */
  if (sect && (sect->common.flags & SECTION_CODE) != 0)
    DECL_IN_TEXT_SECTION (decl) = 1;

  /* If the decl is part of an object_block, make sure that the decl
     has been positioned within its block, but do not write out its
     definition yet.  output_object_blocks will do that later.  */
  if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol) && SYMBOL_REF_BLOCK (symbol))
    {
      gcc_assert (!dont_output_data);
      place_block_symbol (symbol);
    }
  else if (SECTION_STYLE (sect) == SECTION_NOSWITCH)
    assemble_noswitch_variable (decl, name, sect, align);
  else
    {
      /* Special-case handling of vtv comdat sections.  */
      if (SECTION_STYLE (sect) == SECTION_NAMED
	  && (strcmp (s1: sect->named.name, s2: ".vtable_map_vars" ) == 0))
	handle_vtv_comdat_section (sect, decl);
      else
	switch_to_section (sect, decl);
      if (align > BITS_PER_UNIT)
	ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
      assemble_variable_contents (decl, name, dont_output_data,
				  merge_strings: (sect->common.flags & SECTION_MERGE)
				  && (sect->common.flags & SECTION_STRINGS));
      /* Pad the object with its AddressSanitizer red zone.  */
      if (asan_protected)
	{
	  unsigned HOST_WIDE_INT int size
	    = tree_to_uhwi (DECL_SIZE_UNIT (decl));
	  assemble_zeros (size: asan_red_zone_size (size));
	}
    }
}
2451 | |
2452 | /* Return true if type TYPE contains any pointers. */ |
2453 | |
2454 | static bool |
2455 | contains_pointers_p (tree type) |
2456 | { |
2457 | switch (TREE_CODE (type)) |
2458 | { |
2459 | case POINTER_TYPE: |
2460 | case REFERENCE_TYPE: |
2461 | /* I'm not sure whether OFFSET_TYPE needs this treatment, |
2462 | so I'll play safe and return 1. */ |
2463 | case OFFSET_TYPE: |
2464 | return true; |
2465 | |
2466 | case RECORD_TYPE: |
2467 | case UNION_TYPE: |
2468 | case QUAL_UNION_TYPE: |
2469 | { |
2470 | tree fields; |
2471 | /* For a type that has fields, see if the fields have pointers. */ |
2472 | for (fields = TYPE_FIELDS (type); fields; fields = DECL_CHAIN (fields)) |
2473 | if (TREE_CODE (fields) == FIELD_DECL |
2474 | && contains_pointers_p (TREE_TYPE (fields))) |
2475 | return true; |
2476 | return false; |
2477 | } |
2478 | |
2479 | case ARRAY_TYPE: |
2480 | /* An array type contains pointers if its element type does. */ |
2481 | return contains_pointers_p (TREE_TYPE (type)); |
2482 | |
2483 | default: |
2484 | return false; |
2485 | } |
2486 | } |
2487 | |
/* We delay assemble_external processing until
   the compilation unit is finalized.  This is the best we can do for
   right now (i.e. stage 3 of GCC 4.0) - the right thing is to delay
   it all the way to final.  See PR 17982 for further discussion.
   Drained by process_pending_assemble_externals.  */
static GTY(()) tree pending_assemble_externals;

/* A similar list of pending libcall symbols.  We only want to declare
   symbols that are actually used in the final assembly.  Chained rtx
   list walked via XEXP (list, 1).  */
static GTY(()) rtx pending_libcall_symbols;

#ifdef ASM_OUTPUT_EXTERNAL
/* Some targets delay some output to final using TARGET_ASM_FILE_END.
   As a result, assemble_external can be called after the list of externals
   is processed and the pointer set destroyed.  When set, assemble_external
   emits declarations immediately instead of queueing them.  */
static bool pending_assemble_externals_processed;

/* Avoid O(external_decls**2) lookups in the pending_assemble_externals
   TREE_LIST in assemble_external.  */
static hash_set<tree> *pending_assemble_externals_set;
2507 | |
2508 | /* True if DECL is a function decl for which no out-of-line copy exists. |
2509 | It is assumed that DECL's assembler name has been set. */ |
2510 | |
2511 | static bool |
2512 | incorporeal_function_p (tree decl) |
2513 | { |
2514 | if (TREE_CODE (decl) == FUNCTION_DECL && fndecl_built_in_p (node: decl)) |
2515 | { |
2516 | const char *name; |
2517 | |
2518 | if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL |
2519 | && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl))) |
2520 | return true; |
2521 | |
2522 | name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)); |
2523 | /* Atomic or sync builtins which have survived this far will be |
2524 | resolved externally and therefore are not incorporeal. */ |
2525 | if (startswith (str: name, prefix: "__builtin_" )) |
2526 | return true; |
2527 | } |
2528 | return false; |
2529 | } |
2530 | |
2531 | /* Actually do the tests to determine if this is necessary, and invoke |
2532 | ASM_OUTPUT_EXTERNAL. */ |
2533 | static void |
2534 | assemble_external_real (tree decl) |
2535 | { |
2536 | rtx rtl = DECL_RTL (decl); |
2537 | |
2538 | if (MEM_P (rtl) && GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF |
2539 | && !SYMBOL_REF_USED (XEXP (rtl, 0)) |
2540 | && !incorporeal_function_p (decl)) |
2541 | { |
2542 | /* Some systems do require some output. */ |
2543 | SYMBOL_REF_USED (XEXP (rtl, 0)) = 1; |
2544 | ASM_OUTPUT_EXTERNAL (asm_out_file, decl, XSTR (XEXP (rtl, 0), 0)); |
2545 | } |
2546 | } |
2547 | #endif |
2548 | |
/* Emit all delayed external declarations: first the decls queued by
   assemble_external, then any pending libcall symbols whose identifier
   was actually referenced in the output.  Clears the pending lists;
   later assemble_external calls emit directly.  */
void
process_pending_assemble_externals (void)
{
#ifdef ASM_OUTPUT_EXTERNAL
  tree list;
  for (list = pending_assemble_externals; list; list = TREE_CHAIN (list))
    assemble_external_real (TREE_VALUE (list));

  for (rtx list = pending_libcall_symbols; list; list = XEXP (list, 1))
    {
      rtx symbol = XEXP (list, 0);
      const char *name = targetm.strip_name_encoding (XSTR (symbol, 0));
      tree id = get_identifier (name);
      /* Declare only libcall symbols whose identifier was referenced.  */
      if (TREE_SYMBOL_REFERENCED (id))
	targetm.asm_out.external_libcall (symbol);
    }

  pending_assemble_externals = 0;
  pending_assemble_externals_processed = true;
  pending_libcall_symbols = NULL_RTX;
  delete pending_assemble_externals_set;
#endif
}
2572 | |
/* This TREE_LIST contains any weak symbol declarations waiting
   to be emitted.  */
static GTY(()) tree weak_decls;

/* Output something to declare an external symbol to the assembler,
   and qualifiers such as weakness.  (Most assemblers don't need
   extern declaration, so we normally output nothing.)  Do nothing if
   DECL is not external.  */

void
assemble_external (tree decl ATTRIBUTE_UNUSED)
{
  /* Make sure that the ASM_OUT_FILE is open.
     If it's not, we should not be calling this function.  */
  gcc_assert (asm_out_file);

  /* In a perfect world, the following condition would be true.
     Sadly, the Go front end emit assembly *from the front end*,
     bypassing the call graph.  See PR52739.  Fix before GCC 4.8.  */
#if 0
  /* This function should only be called if we are expanding, or have
     expanded, to RTL.
     Ideally, only final.cc would be calling this function, but it is
     not clear whether that would break things somehow.  See PR 17982
     for further discussion.  */
  gcc_assert (state == EXPANSION
	      || state == FINISHED);
#endif

  if (!DECL_P (decl) || !DECL_EXTERNAL (decl) || !TREE_PUBLIC (decl))
    return;

  /* We want to output annotation for weak and external symbols at
     very last to check if they are references or not.  */

  if (TARGET_SUPPORTS_WEAK
      && DECL_WEAK (decl)
      /* TREE_STATIC is a weird and abused creature which is not
	 generally the right test for whether an entity has been
	 locally emitted, inlined or otherwise not-really-extern, but
	 for declarations that can be weak, it happens to be
	 match.  */
      && !TREE_STATIC (decl)
      && lookup_attribute (attr_name: "weak" , DECL_ATTRIBUTES (decl))
      && value_member (decl, weak_decls) == NULL_TREE)
    weak_decls = tree_cons (NULL, decl, weak_decls);

#ifdef ASM_OUTPUT_EXTERNAL
  /* Once the pending list has been drained at end of unit, emit the
     declaration immediately instead of queueing it.  */
  if (pending_assemble_externals_processed)
    {
      assemble_external_real (decl);
      return;
    }

  /* Queue DECL at most once; the hash set guards against duplicates.  */
  if (! pending_assemble_externals_set->add (k: decl))
    pending_assemble_externals = tree_cons (NULL, decl,
					    pending_assemble_externals);
#endif
}
2632 | |
2633 | /* Similar, for calling a library function FUN. */ |
2634 | |
2635 | void |
2636 | assemble_external_libcall (rtx fun) |
2637 | { |
2638 | /* Declare library function name external when first used, if nec. */ |
2639 | if (! SYMBOL_REF_USED (fun)) |
2640 | { |
2641 | #ifdef ASM_OUTPUT_EXTERNAL |
2642 | gcc_assert (!pending_assemble_externals_processed); |
2643 | #endif |
2644 | SYMBOL_REF_USED (fun) = 1; |
2645 | /* Make sure the libcall symbol is in the symtab so any |
2646 | reference to it will mark its tree node as referenced, via |
2647 | assemble_name_resolve. These are eventually emitted, if |
2648 | used, in process_pending_assemble_externals. */ |
2649 | const char *name = targetm.strip_name_encoding (XSTR (fun, 0)); |
2650 | get_identifier (name); |
2651 | pending_libcall_symbols = gen_rtx_EXPR_LIST (VOIDmode, fun, |
2652 | pending_libcall_symbols); |
2653 | } |
2654 | } |
2655 | |
/* Assemble a label named NAME.  FILE is the assembly output stream.  */

void
assemble_label (FILE *file, const char *name)
{
  /* Delegate to the target macro, which typically writes "NAME:".  */
  ASM_OUTPUT_LABEL (file, name);
}
2663 | |
/* Set the symbol_referenced flag for ID (an IDENTIFIER_NODE).  Called
   e.g. from assemble_name_resolve when the name is about to appear in
   the assembly output.  */
void
mark_referenced (tree id)
{
  TREE_SYMBOL_REFERENCED (id) = 1;
}
2670 | |
2671 | /* Set the symbol_referenced flag for DECL and notify callgraph. */ |
2672 | void |
2673 | mark_decl_referenced (tree decl) |
2674 | { |
2675 | if (TREE_CODE (decl) == FUNCTION_DECL) |
2676 | { |
2677 | /* Extern inline functions don't become needed when referenced. |
2678 | If we know a method will be emitted in other TU and no new |
2679 | functions can be marked reachable, just use the external |
2680 | definition. */ |
2681 | struct cgraph_node *node = cgraph_node::get_create (decl); |
2682 | if (!DECL_EXTERNAL (decl) |
2683 | && !node->definition) |
2684 | node->mark_force_output (); |
2685 | } |
2686 | else if (VAR_P (decl)) |
2687 | { |
2688 | varpool_node *node = varpool_node::get_create (decl); |
2689 | /* C++ frontend use mark_decl_references to force COMDAT variables |
2690 | to be output that might appear dead otherwise. */ |
2691 | node->force_output = true; |
2692 | } |
2693 | /* else do nothing - we can get various sorts of CST nodes here, |
2694 | which do not need to be marked. */ |
2695 | } |
2696 | |
2697 | |
2698 | /* Output to FILE (an assembly file) a reference to NAME. If NAME |
2699 | starts with a *, the rest of NAME is output verbatim. Otherwise |
2700 | NAME is transformed in a target-specific way (usually by the |
2701 | addition of an underscore). */ |
2702 | |
2703 | void |
2704 | assemble_name_raw (FILE *file, const char *name) |
2705 | { |
2706 | if (name[0] == '*') |
2707 | fputs (s: &name[1], stream: file); |
2708 | else |
2709 | ASM_OUTPUT_LABELREF (file, name); |
2710 | } |
2711 | |
2712 | /* Return NAME that should actually be emitted, looking through |
2713 | transparent aliases. If NAME refers to an entity that is also |
2714 | represented as a tree (like a function or variable), mark the entity |
2715 | as referenced. */ |
2716 | const char * |
2717 | assemble_name_resolve (const char *name) |
2718 | { |
2719 | const char *real_name = targetm.strip_name_encoding (name); |
2720 | tree id = maybe_get_identifier (real_name); |
2721 | |
2722 | if (id) |
2723 | { |
2724 | tree id_orig = id; |
2725 | |
2726 | mark_referenced (id); |
2727 | ultimate_transparent_alias_target (alias: &id); |
2728 | if (id != id_orig) |
2729 | name = IDENTIFIER_POINTER (id); |
2730 | gcc_assert (! TREE_CHAIN (id)); |
2731 | } |
2732 | |
2733 | return name; |
2734 | } |
2735 | |
2736 | /* Like assemble_name_raw, but should be used when NAME might refer to |
2737 | an entity that is also represented as a tree (like a function or |
2738 | variable). If NAME does refer to such an entity, that entity will |
2739 | be marked as referenced. */ |
2740 | |
2741 | void |
2742 | assemble_name (FILE *file, const char *name) |
2743 | { |
2744 | assemble_name_raw (file, name: assemble_name_resolve (name)); |
2745 | } |
2746 | |
/* Allocate SIZE bytes writable static space with a gensym name
   and return an RTX to refer to its address.  */

rtx
assemble_static_space (unsigned HOST_WIDE_INT size)
{
  char name[17];
  const char *namestring;
  rtx x;

  /* Generate a unique internal label ("LF<n>") for the block.  */
  ASM_GENERATE_INTERNAL_LABEL (name, "LF" , const_labelno);
  ++const_labelno;
  namestring = ggc_strdup (name);

  x = gen_rtx_SYMBOL_REF (Pmode, namestring);
  SYMBOL_REF_FLAGS (x) = SYMBOL_FLAG_LOCAL;

  /* Emit the local definition with the most specific target macro
     available, falling back to plain ASM_OUTPUT_LOCAL.  */
#ifdef ASM_OUTPUT_ALIGNED_DECL_LOCAL
  ASM_OUTPUT_ALIGNED_DECL_LOCAL (asm_out_file, NULL_TREE, name, size,
				 BIGGEST_ALIGNMENT);
#else
#ifdef ASM_OUTPUT_ALIGNED_LOCAL
  ASM_OUTPUT_ALIGNED_LOCAL (asm_out_file, name, size, BIGGEST_ALIGNMENT);
#else
  {
    /* Round size up to multiple of BIGGEST_ALIGNMENT bits
       so that each uninitialized object starts on such a boundary.  */
    /* Variable `rounded' might or might not be used in ASM_OUTPUT_LOCAL.  */
    unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED
      = ((size + (BIGGEST_ALIGNMENT / BITS_PER_UNIT) - 1)
	 / (BIGGEST_ALIGNMENT / BITS_PER_UNIT)
	 * (BIGGEST_ALIGNMENT / BITS_PER_UNIT));
    ASM_OUTPUT_LOCAL (asm_out_file, name, size, rounded);
  }
#endif
#endif
  return x;
}
2785 | |
/* Assemble the static constant template for function entry trampolines.
   This is done at most once per compilation.
   Returns an RTX for the address of the template.  */

/* Cached MEM for the emitted template; non-NULL after the first call.  */
static GTY(()) rtx initial_trampoline;

rtx
assemble_trampoline_template (void)
{
  char label[256];
  const char *name;
  int align;
  rtx symbol;

  gcc_assert (targetm.asm_out.trampoline_template != NULL);

  /* Emit the template only once; reuse the cached rtx afterwards.  */
  if (initial_trampoline)
    return initial_trampoline;

  /* By default, put trampoline templates in read-only data section.  */

#ifdef TRAMPOLINE_SECTION
  switch_to_section (TRAMPOLINE_SECTION);
#else
  switch_to_section (readonly_data_section);
#endif

  /* Write the assembler code to define one.  */
  align = floor_log2 (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
  if (align > 0)
    ASM_OUTPUT_ALIGN (asm_out_file, align);

  targetm.asm_out.internal_label (asm_out_file, "LTRAMP" , 0);
  targetm.asm_out.trampoline_template (asm_out_file);

  /* Record the rtl to refer to it.  */
  ASM_GENERATE_INTERNAL_LABEL (label, "LTRAMP" , 0);
  name = ggc_strdup (label);
  symbol = gen_rtx_SYMBOL_REF (Pmode, name);
  SYMBOL_REF_FLAGS (symbol) = SYMBOL_FLAG_LOCAL;

  /* Wrap the symbol in a BLKmode MEM carrying the template's alignment
     and size so callers can copy it.  */
  initial_trampoline = gen_const_mem (BLKmode, symbol);
  set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
  set_mem_size (initial_trampoline, TRAMPOLINE_SIZE);

  return initial_trampoline;
}
2833 | |
/* A and B are either alignments or offsets.  Return the minimum alignment
   that may be assumed after adding the two together.  */

static inline unsigned
min_align (unsigned int a, unsigned int b)
{
  /* The sum is only guaranteed aligned to the lowest bit set in either
     operand, i.e. the lowest set bit of A | B (0 when both are 0).  */
  unsigned int bits = a | b;
  return bits & -bits;
}
2842 | |
2843 | /* Return the assembler directive for creating a given kind of integer |
2844 | object. SIZE is the number of bytes in the object and ALIGNED_P |
2845 | indicates whether it is known to be aligned. Return NULL if the |
2846 | assembly dialect has no such directive. |
2847 | |
2848 | The returned string should be printed at the start of a new line and |
2849 | be followed immediately by the object's initial value. */ |
2850 | |
2851 | const char * |
2852 | integer_asm_op (int size, int aligned_p) |
2853 | { |
2854 | struct asm_int_op *ops; |
2855 | |
2856 | if (aligned_p) |
2857 | ops = &targetm.asm_out.aligned_op; |
2858 | else |
2859 | ops = &targetm.asm_out.unaligned_op; |
2860 | |
2861 | switch (size) |
2862 | { |
2863 | case 1: |
2864 | return targetm.asm_out.byte_op; |
2865 | case 2: |
2866 | return ops->hi; |
2867 | case 3: |
2868 | return ops->psi; |
2869 | case 4: |
2870 | return ops->si; |
2871 | case 5: |
2872 | case 6: |
2873 | case 7: |
2874 | return ops->pdi; |
2875 | case 8: |
2876 | return ops->di; |
2877 | case 9: |
2878 | case 10: |
2879 | case 11: |
2880 | case 12: |
2881 | case 13: |
2882 | case 14: |
2883 | case 15: |
2884 | return ops->pti; |
2885 | case 16: |
2886 | return ops->ti; |
2887 | default: |
2888 | return NULL; |
2889 | } |
2890 | } |
2891 | |
/* Use directive OP to assemble an integer object X.  Print OP at the
   start of the line, followed immediately by the value of X.  */

void
assemble_integer_with_op (const char *op, rtx x)
{
  /* Directive, value, newline -- in that exact order.  */
  fputs (s: op, stream: asm_out_file);
  output_addr_const (asm_out_file, x);
  fputc (c: '\n', stream: asm_out_file);
}
2902 | |
2903 | /* The default implementation of the asm_out.integer target hook. */ |
2904 | |
2905 | bool |
2906 | default_assemble_integer (rtx x ATTRIBUTE_UNUSED, |
2907 | unsigned int size ATTRIBUTE_UNUSED, |
2908 | int aligned_p ATTRIBUTE_UNUSED) |
2909 | { |
2910 | const char *op = integer_asm_op (size, aligned_p); |
2911 | /* Avoid GAS bugs for large values. Specifically negative values whose |
2912 | absolute value fits in a bfd_vma, but not in a bfd_signed_vma. */ |
2913 | if (size > UNITS_PER_WORD && size > POINTER_SIZE_UNITS) |
2914 | return false; |
2915 | return op && (assemble_integer_with_op (op, x), true); |
2916 | } |
2917 | |
/* Assemble the integer constant X into an object of SIZE bytes.  ALIGN is
   the alignment of the integer in bits.  Return 1 if we were able to output
   the constant, otherwise 0.  We must be able to output the constant,
   if FORCE is nonzero.  */

bool
assemble_integer (rtx x, unsigned int size, unsigned int align, int force)
{
  int aligned_p;

  aligned_p = (align >= MIN (size * BITS_PER_UNIT, BIGGEST_ALIGNMENT));

  /* See if the target hook can handle this kind of object.  */
  if (targetm.asm_out.integer (x, size, aligned_p))
    return true;

  /* If the object is a multi-byte one, try splitting it up.  Split
     it into words if it is multi-word, otherwise split it into bytes.  */
  if (size > 1)
    {
      machine_mode omode, imode;
      unsigned int subalign;
      unsigned int subsize, i;
      enum mode_class mclass;

      subsize = size > UNITS_PER_WORD? UNITS_PER_WORD : 1;
      subalign = MIN (align, subsize * BITS_PER_UNIT);
      /* CONST_FIXED pieces keep their fixed-point mode class; everything
	 else is treated as plain integers.  */
      if (GET_CODE (x) == CONST_FIXED)
	mclass = GET_MODE_CLASS (GET_MODE (x));
      else
	mclass = MODE_INT;

      omode = mode_for_size (subsize * BITS_PER_UNIT, mclass, 0).require ();
      imode = mode_for_size (size * BITS_PER_UNIT, mclass, 0).require ();

      /* Emit each sub-piece recursively; stop at the first piece that
	 cannot be extracted or emitted.  */
      for (i = 0; i < size; i += subsize)
	{
	  rtx partial = simplify_subreg (outermode: omode, op: x, innermode: imode, byte: i);
	  if (!partial || !assemble_integer (x: partial, size: subsize, align: subalign, force: 0))
	    break;
	}
      if (i == size)
	return true;

      /* If we've printed some of it, but not all of it, there's no going
	 back now.  */
      gcc_assert (!i);
    }

  gcc_assert (!force);

  return false;
}
2971 | |
/* Assemble the floating-point constant D into an object of size MODE.  ALIGN
   is the alignment of the constant in bits.  If REVERSE is true, D is output
   in reverse storage order.  */

void
assemble_real (REAL_VALUE_TYPE d, scalar_float_mode mode, unsigned int align,
	       bool reverse)
{
  long data[4] = {0, 0, 0, 0};
  int bitsize, nelts, nunits, units_per;
  rtx elt;

  /* This is hairy.  We have a quantity of known size.  real_to_target
     will put it into an array of *host* longs, 32 bits per element
     (even if long is more than 32 bits).  We need to determine the
     number of array elements that are occupied (nelts) and the number
     of *target* min-addressable units that will be occupied in the
     object file (nunits).  We cannot assume that 32 divides the
     mode's bitsize (size * BITS_PER_UNIT) evenly.

     size * BITS_PER_UNIT is used here to make sure that padding bits
     (which might appear at either end of the value; real_to_target
     will include the padding bits in its output array) are included.  */

  nunits = GET_MODE_SIZE (mode);
  bitsize = nunits * BITS_PER_UNIT;
  nelts = CEIL (bitsize, 32);
  units_per = 32 / BITS_PER_UNIT;

  real_to_target (data, &d, mode);

  /* Put out the first word with the specified alignment.  */
  unsigned int chunk_nunits = MIN (nunits, units_per);
  /* In reverse storage order the last host word comes out first, with
     its bytes flipped; otherwise emit data[0] sign-extended to the
     chunk width.  */
  if (reverse)
    elt = flip_storage_order (SImode, gen_int_mode (data[nelts - 1], SImode));
  else
    elt = GEN_INT (sext_hwi (data[0], chunk_nunits * BITS_PER_UNIT));
  assemble_integer (x: elt, size: chunk_nunits, align, force: 1);
  nunits -= chunk_nunits;

  /* Subsequent words need only 32-bit alignment.  */
  align = min_align (a: align, b: 32);

  for (int i = 1; i < nelts; i++)
    {
      chunk_nunits = MIN (nunits, units_per);
      if (reverse)
	elt = flip_storage_order (SImode,
				  gen_int_mode (data[nelts - 1 - i], SImode));
      else
	elt = GEN_INT (sext_hwi (data[i], chunk_nunits * BITS_PER_UNIT));
      assemble_integer (x: elt, size: chunk_nunits, align, force: 1);
      nunits -= chunk_nunits;
    }
}
3027 | |
/* Given an expression EXP with a constant value,
   reduce it to the sum of an assembler symbol and an integer.
   Store them both in the structure *VALUE.
   EXP must be reducible.  */

class addr_const {
public:
  rtx base;		/* SYMBOL_REF or LABEL_REF (see decode_addr_const).  */
  poly_int64 offset;	/* Byte offset added to BASE.  */
};
3038 | |
/* Decompose the address constant EXP (an ADDR_EXPR or FDESC_EXPR) into
   VALUE->base (a SYMBOL_REF or LABEL_REF) plus VALUE->offset.  Walks
   component/array/mem references, accumulating their byte offsets.  */
static void
decode_addr_const (tree exp, class addr_const *value)
{
  tree target = TREE_OPERAND (exp, 0);
  poly_int64 offset = 0;
  rtx x;

  /* Peel reference nodes off TARGET, accumulating the byte offset each
     one contributes, until we reach the underlying object.  */
  while (1)
    {
      poly_int64 bytepos;
      if (TREE_CODE (target) == COMPONENT_REF
	  && poly_int_tree_p (t: byte_position (TREE_OPERAND (target, 1)),
			      value: &bytepos))
	{
	  offset += bytepos;
	  target = TREE_OPERAND (target, 0);
	}
      else if (TREE_CODE (target) == ARRAY_REF
	       || TREE_CODE (target) == ARRAY_RANGE_REF)
	{
	  /* Truncate big offset.  */
	  offset
	    += (TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (target)))
		* wi::to_poly_widest (TREE_OPERAND (target, 1)).force_shwi ());
	  target = TREE_OPERAND (target, 0);
	}
      else if (TREE_CODE (target) == MEM_REF
	       && TREE_CODE (TREE_OPERAND (target, 0)) == ADDR_EXPR)
	{
	  offset += mem_ref_offset (target).force_shwi ();
	  target = TREE_OPERAND (TREE_OPERAND (target, 0), 0);
	}
      else if (INDIRECT_REF_P (target)
	       && TREE_CODE (TREE_OPERAND (target, 0)) == NOP_EXPR
	       && TREE_CODE (TREE_OPERAND (TREE_OPERAND (target, 0), 0))
	       == ADDR_EXPR)
	/* *(type *)&obj -- look straight through to OBJ.  */
	target = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (target, 0), 0), 0);
      else
	break;
    }

  /* Map the underlying object to its RTL memory reference.  */
  switch (TREE_CODE (target))
    {
    case VAR_DECL:
    case FUNCTION_DECL:
      x = DECL_RTL (target);
      break;

    case LABEL_DECL:
      x = gen_rtx_MEM (FUNCTION_MODE,
		       gen_rtx_LABEL_REF (Pmode, force_label_rtx (target)));
      break;

    case REAL_CST:
    case FIXED_CST:
    case STRING_CST:
    case COMPLEX_CST:
    case CONSTRUCTOR:
    case INTEGER_CST:
      x = lookup_constant_def (target);
      /* Should have been added by output_addressed_constants.  */
      gcc_assert (x);
      break;

    case INDIRECT_REF:
      /* This deals with absolute addresses.  */
      offset += tree_to_shwi (TREE_OPERAND (target, 0));
      x = gen_rtx_MEM (QImode,
		       gen_rtx_SYMBOL_REF (Pmode, "origin of addresses" ));
      break;

    case COMPOUND_LITERAL_EXPR:
      gcc_assert (COMPOUND_LITERAL_EXPR_DECL (target));
      x = DECL_RTL (COMPOUND_LITERAL_EXPR_DECL (target));
      break;

    default:
      gcc_unreachable ();
    }

  /* All cases above yield a MEM; its address is the base symbol.  */
  gcc_assert (MEM_P (x));
  x = XEXP (x, 0);

  value->base = x;
  value->offset = offset;
}
3125 | |
/* Hash table of tree-constant descriptors, keyed by constant value;
   used to share identical constants.  GC-rooted via GTY.  */
static GTY(()) hash_table<tree_descriptor_hasher> *const_desc_htab;

static void maybe_output_constant_def_contents (struct constant_descriptor_tree *, int);
3129 | |
/* Constant pool accessor function.  Returns the shared tree-constant
   descriptor hash table (may be NULL before first use).  */

hash_table<tree_descriptor_hasher> *
constant_pool_htab (void)
{
  return const_desc_htab;
}
3137 | |
/* Compute a hash code for a constant expression.  */

hashval_t
tree_descriptor_hasher::hash (constant_descriptor_tree *ptr)
{
  /* The hash is computed once when the descriptor is built; just
     return the cached value.  */
  return ptr->hash;
}
3145 | |
/* Recursive worker computing a hash value for constant expression EXP.
   Constants that compare_constant would consider equal must hash
   equal, so only value-determining bits participate.  The multiplier
   constants (563, 603, 613, ...) are arbitrary but must not change
   without rehashing existing tables.  */
static hashval_t
const_hash_1 (const tree exp)
{
  const char *p;
  hashval_t hi;
  int len, i;
  enum tree_code code = TREE_CODE (exp);

  /* Either set P and LEN to the address and len of something to hash and
     exit the switch or return a value.  */

  switch (code)
    {
    case INTEGER_CST:
      p = (char *) &TREE_INT_CST_ELT (exp, 0);
      len = TREE_INT_CST_NUNITS (exp) * sizeof (HOST_WIDE_INT);
      break;

    case REAL_CST:
      return real_hash (TREE_REAL_CST_PTR (exp));

    case FIXED_CST:
      return fixed_hash (TREE_FIXED_CST_PTR (exp));

    case STRING_CST:
      p = TREE_STRING_POINTER (exp);
      len = TREE_STRING_LENGTH (exp);
      break;

    case COMPLEX_CST:
      return (const_hash_1 (TREE_REALPART (exp)) * 5
	      + const_hash_1 (TREE_IMAGPART (exp)));

    case VECTOR_CST:
      {
	/* Hash the encoding parameters plus each encoded element.  */
	hi = 7 + VECTOR_CST_NPATTERNS (exp);
	hi = hi * 563 + VECTOR_CST_NELTS_PER_PATTERN (exp);
	unsigned int count = vector_cst_encoded_nelts (t: exp);
	for (unsigned int i = 0; i < count; ++i)
	  hi = hi * 563 + const_hash_1 (VECTOR_CST_ENCODED_ELT (exp, i));
	return hi;
      }

    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;
	tree value;

	hi = 5 + int_size_in_bytes (TREE_TYPE (exp));

	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	  if (value)
	    hi = hi * 603 + const_hash_1 (exp: value);

	return hi;
      }

    case ADDR_EXPR:
      if (CONSTANT_CLASS_P (TREE_OPERAND (exp, 0)))
	return const_hash_1 (TREE_OPERAND (exp, 0));

      /* Fallthru. */
    case FDESC_EXPR:
      {
	class addr_const value;

	decode_addr_const (exp, value: &value);
	switch (GET_CODE (value.base))
	  {
	  case SYMBOL_REF:
	    /* Don't hash the address of the SYMBOL_REF;
	       only use the offset and the symbol name.  */
	    hi = value.offset.coeffs[0];
	    p = XSTR (value.base, 0);
	    for (i = 0; p[i] != 0; i++)
	      hi = ((hi * 613) + (unsigned) (p[i]));
	    break;

	  case LABEL_REF:
	    hi = (value.offset.coeffs[0]
		  + CODE_LABEL_NUMBER (label_ref_label (value.base)) * 13);
	    break;

	  default:
	    gcc_unreachable ();
	  }
      }
      return hi;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case MINUS_EXPR:
      return (const_hash_1 (TREE_OPERAND (exp, 0)) * 9
	      + const_hash_1 (TREE_OPERAND (exp, 1)));

    CASE_CONVERT:
      return const_hash_1 (TREE_OPERAND (exp, 0)) * 7 + 2;

    default:
      /* A language specific constant. Just hash the code. */
      return code;
    }

  /* Compute hashing function.  */
  hi = len;
  for (i = 0; i < len; i++)
    hi = ((hi * 613) + (unsigned) (p[i]));

  return hi;
}
3256 | |
3257 | /* Wrapper of compare_constant, for the htab interface. */ |
3258 | bool |
3259 | tree_descriptor_hasher::equal (constant_descriptor_tree *c1, |
3260 | constant_descriptor_tree *c2) |
3261 | { |
3262 | if (c1->hash != c2->hash) |
3263 | return false; |
3264 | return compare_constant (c1->value, c2->value); |
3265 | } |
3266 | |
/* Compare t1 and t2, and return true only if they are known to result in
   the same bit pattern on output.  Conservative: may return false for
   constants that would in fact be emitted identically.  */

static bool
compare_constant (const tree t1, const tree t2)
{
  enum tree_code typecode;

  /* Two NULLs match; NULL never matches a real node.  */
  if (t1 == NULL_TREE)
    return t2 == NULL_TREE;
  if (t2 == NULL_TREE)
    return false;

  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  switch (TREE_CODE (t1))
    {
    case INTEGER_CST:
      /* Integer constants are the same only if the same width of type.  */
      if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
	return false;
      if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2)))
	return false;
      return tree_int_cst_equal (t1, t2);

    case REAL_CST:
      /* Real constants are the same only if the same width of type.  In
	 addition to the same width, we need to check whether the modes are the
	 same.  There might be two floating point modes that are the same size
	 but have different representations, such as the PowerPC that has 2
	 different 128-bit floating point types (IBM extended double and IEEE
	 128-bit floating point).  */
      if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
	return false;
      if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2)))
	return false;
      return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));

    case FIXED_CST:
      /* Fixed constants are the same only if the same width of type.  */
      if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
	return false;

      return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));

    case STRING_CST:
      /* Strings must agree in mode and padded size as well as contents.  */
      if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2))
	  || int_size_in_bytes (TREE_TYPE (t1))
	     != int_size_in_bytes (TREE_TYPE (t2)))
	return false;

      return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
	      && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
			   TREE_STRING_LENGTH (t1)));

    case COMPLEX_CST:
      return (compare_constant (TREE_REALPART (t1), TREE_REALPART (t2))
	      && compare_constant (TREE_IMAGPART (t1), TREE_IMAGPART (t2)));

    case VECTOR_CST:
      {
	/* Compare the encodings, not the expanded elements: the pattern
	   counts must match first.  */
	if (VECTOR_CST_NPATTERNS (t1)
	    != VECTOR_CST_NPATTERNS (t2))
	  return false;

	if (VECTOR_CST_NELTS_PER_PATTERN (t1)
	    != VECTOR_CST_NELTS_PER_PATTERN (t2))
	  return false;

	unsigned int count = vector_cst_encoded_nelts (t: t1);
	for (unsigned int i = 0; i < count; ++i)
	  if (!compare_constant (VECTOR_CST_ENCODED_ELT (t1, i),
				 VECTOR_CST_ENCODED_ELT (t2, i)))
	    return false;

	return true;
      }

    case CONSTRUCTOR:
      {
	vec<constructor_elt, va_gc> *v1, *v2;
	unsigned HOST_WIDE_INT idx;

	typecode = TREE_CODE (TREE_TYPE (t1));
	if (typecode != TREE_CODE (TREE_TYPE (t2)))
	  return false;

	if (typecode == ARRAY_TYPE)
	  {
	    HOST_WIDE_INT size_1 = int_size_in_bytes (TREE_TYPE (t1));
	    /* For arrays, check that mode, size and storage order match.  */
	    if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2))
		|| size_1 == -1
		|| size_1 != int_size_in_bytes (TREE_TYPE (t2))
		|| TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (t1))
		   != TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (t2)))
	      return false;
	  }
	else
	  {
	    /* For record and union constructors, require exact type
	       equality.  */
	    if (TREE_TYPE (t1) != TREE_TYPE (t2))
	      return false;
	  }

	v1 = CONSTRUCTOR_ELTS (t1);
	v2 = CONSTRUCTOR_ELTS (t2);
	if (vec_safe_length (v: v1) != vec_safe_length (v: v2))
	  return false;

	for (idx = 0; idx < vec_safe_length (v: v1); ++idx)
	  {
	    constructor_elt *c1 = &(*v1)[idx];
	    constructor_elt *c2 = &(*v2)[idx];

	    /* Check that each value is the same...  */
	    if (!compare_constant (t1: c1->value, t2: c2->value))
	      return false;
	    /* ... and that they apply to the same fields!  */
	    if (typecode == ARRAY_TYPE)
	      {
		if (!compare_constant (t1: c1->index, t2: c2->index))
		  return false;
	      }
	    else
	      {
		if (c1->index != c2->index)
		  return false;
	      }
	  }

	return true;
      }

    case ADDR_EXPR:
    case FDESC_EXPR:
      {
	class addr_const value1, value2;
	enum rtx_code code;
	bool ret;

	/* Reduce each address to base symbol + offset and compare
	   those.  */
	decode_addr_const (exp: t1, value: &value1);
	decode_addr_const (exp: t2, value: &value2);

	if (maybe_ne (a: value1.offset, b: value2.offset))
	  return false;

	code = GET_CODE (value1.base);
	if (code != GET_CODE (value2.base))
	  return false;

	switch (code)
	  {
	  case SYMBOL_REF:
	    ret = (strcmp (XSTR (value1.base, 0), XSTR (value2.base, 0)) == 0);
	    break;

	  case LABEL_REF:
	    ret = (CODE_LABEL_NUMBER (label_ref_label (value1.base))
		   == CODE_LABEL_NUMBER (label_ref_label (value2.base)));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	return ret;
      }

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case MINUS_EXPR:
    case RANGE_EXPR:
      return (compare_constant (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0))
	      && compare_constant (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1)));

    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
      return compare_constant (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));

    default:
      return false;
    }
}
3452 | |
3453 | /* Return the section into which constant EXP should be placed. */ |
3454 | |
3455 | static section * |
3456 | get_constant_section (tree exp, unsigned int align) |
3457 | { |
3458 | return targetm.asm_out.select_section (exp, |
3459 | compute_reloc_for_constant (exp), |
3460 | align); |
3461 | } |
3462 | |
3463 | /* Return the size of constant EXP in bytes. */ |
3464 | |
3465 | static HOST_WIDE_INT |
3466 | get_constant_size (tree exp) |
3467 | { |
3468 | HOST_WIDE_INT size; |
3469 | |
3470 | size = int_size_in_bytes (TREE_TYPE (exp)); |
3471 | gcc_checking_assert (size >= 0); |
3472 | gcc_checking_assert (TREE_CODE (exp) != STRING_CST |
3473 | || size >= TREE_STRING_LENGTH (exp)); |
3474 | return size; |
3475 | } |
3476 | |
3477 | /* Subroutine of output_constant_def: |
3478 | No constant equal to EXP is known to have been output. |
3479 | Make a constant descriptor to enter EXP in the hash table. |
3480 | Assign the label number and construct RTL to refer to the |
3481 | constant's location in memory. |
3482 | Caller is responsible for updating the hash table. */ |
3483 | |
static struct constant_descriptor_tree *
build_constant_desc (tree exp)
{
  struct constant_descriptor_tree *desc;
  rtx symbol, rtl;
  char label[256];
  int labelno;
  tree decl;

  desc = ggc_alloc<constant_descriptor_tree> ();
  desc->value = exp;

  /* Create a string containing the label name, in LABEL.  */
  labelno = const_labelno++;
  ASM_GENERATE_INTERNAL_LABEL (label, "LC" , labelno);

  /* Construct the VAR_DECL associated with the constant.  */
  decl = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (label),
		     TREE_TYPE (exp));
  /* The decl is compiler-generated, hidden from debug info, read-only,
     statically allocated, and its address may be taken.  */
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  TREE_READONLY (decl) = 1;
  TREE_STATIC (decl) = 1;
  TREE_ADDRESSABLE (decl) = 1;
  /* We don't set the RTL yet as this would cause varpool to assume that the
     variable is referenced.  Moreover, it would just be dropped in LTO mode.
     Instead we set the flag that will be recognized in make_decl_rtl.  */
  DECL_IN_CONSTANT_POOL (decl) = 1;
  DECL_INITIAL (decl) = desc->value;
  /* ??? targetm.constant_alignment hasn't been updated for vector types on
     most architectures so use DATA_ALIGNMENT as well, except for strings.  */
  if (TREE_CODE (exp) == STRING_CST)
    SET_DECL_ALIGN (decl, targetm.constant_alignment (exp, DECL_ALIGN (decl)));
  else
    {
      align_variable (decl, dont_output_data: 0);
      /* Raise the alignment to the mode's natural alignment when either a
	 misaligned-move pattern exists for the mode or the target reports
	 unaligned access as slow.  */
      if (DECL_ALIGN (decl) < GET_MODE_ALIGNMENT (DECL_MODE (decl))
	  && ((optab_handler (op: movmisalign_optab, DECL_MODE (decl))
	       != CODE_FOR_nothing)
	      || targetm.slow_unaligned_access (DECL_MODE (decl),
						DECL_ALIGN (decl))))
	SET_DECL_ALIGN (decl, GET_MODE_ALIGNMENT (DECL_MODE (decl)));
    }

  /* Now construct the SYMBOL_REF and the MEM.  */
  if (use_object_blocks_p ())
    {
      /* Place the constant in the object block of its section; the
	 block offset (-1) is assigned later by place_block_symbol.  */
      int align = (TREE_CODE (decl) == CONST_DECL
		   || (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
		   ? DECL_ALIGN (decl)
		   : symtab_node::get (decl)->definition_alignment ());
      section *sect = get_constant_section (exp, align);
      symbol = create_block_symbol (ggc_strdup (label),
				    block: get_block_for_section (sect), offset: -1);
    }
  else
    symbol = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (label));
  /* Constant-pool entries are always local to this object file.  */
  SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LOCAL;
  SET_SYMBOL_REF_DECL (symbol, decl);
  TREE_CONSTANT_POOL_ADDRESS_P (symbol) = 1;

  rtl = gen_const_mem (TYPE_MODE (TREE_TYPE (exp)), symbol);
  set_mem_alias_set (rtl, 0);

  /* Putting EXP into the literal pool might have imposed a different
     alignment which should be visible in the RTX as well.  */
  set_mem_align (rtl, DECL_ALIGN (decl));

  /* We cannot share RTX'es in pool entries.
     Mark this piece of RTL as required for unsharing.  */
  RTX_FLAG (rtl, used) = 1;

  /* Set flags or add text to the name to record information, such as
     that it is a local symbol.  If the name is changed, the macro
     ASM_OUTPUT_LABELREF will have to know how to strip this
     information.  This call might invalidate our local variable
     SYMBOL; we can't use it afterward.  */
  targetm.encode_section_info (exp, rtl, true);

  desc->rtl = rtl;

  return desc;
}
3567 | |
3568 | /* Subroutine of output_constant_def and tree_output_constant_def: |
3569 | Add a constant to the hash table that tracks which constants |
3570 | already have labels. */ |
3571 | |
3572 | static constant_descriptor_tree * |
3573 | add_constant_to_table (tree exp, int defer) |
3574 | { |
3575 | /* The hash table methods may call output_constant_def for addressed |
3576 | constants, so handle them first. */ |
3577 | output_addressed_constants (exp, defer); |
3578 | |
3579 | /* Sanity check to catch recursive insertion. */ |
3580 | static bool inserting; |
3581 | gcc_assert (!inserting); |
3582 | inserting = true; |
3583 | |
3584 | /* Look up EXP in the table of constant descriptors. If we didn't |
3585 | find it, create a new one. */ |
3586 | struct constant_descriptor_tree key; |
3587 | key.value = exp; |
3588 | key.hash = const_hash_1 (exp); |
3589 | constant_descriptor_tree **loc |
3590 | = const_desc_htab->find_slot_with_hash (comparable: &key, hash: key.hash, insert: INSERT); |
3591 | |
3592 | inserting = false; |
3593 | |
3594 | struct constant_descriptor_tree *desc = *loc; |
3595 | if (!desc) |
3596 | { |
3597 | desc = build_constant_desc (exp); |
3598 | desc->hash = key.hash; |
3599 | *loc = desc; |
3600 | } |
3601 | |
3602 | return desc; |
3603 | } |
3604 | |
3605 | /* Return an rtx representing a reference to constant data in memory |
3606 | for the constant expression EXP. |
3607 | |
3608 | If assembler code for such a constant has already been output, |
3609 | return an rtx to refer to it. |
3610 | Otherwise, output such a constant in memory |
3611 | and generate an rtx for it. |
3612 | |
3613 | If DEFER is nonzero, this constant can be deferred and output only |
3614 | if referenced in the function after all optimizations. |
3615 | |
3616 | `const_desc_table' records which constants already have label strings. */ |
3617 | |
3618 | rtx |
3619 | output_constant_def (tree exp, int defer) |
3620 | { |
3621 | struct constant_descriptor_tree *desc = add_constant_to_table (exp, defer); |
3622 | maybe_output_constant_def_contents (desc, defer); |
3623 | return desc->rtl; |
3624 | } |
3625 | |
3626 | /* Subroutine of output_constant_def: Decide whether or not we need to |
3627 | output the constant DESC now, and if so, do it. */ |
3628 | static void |
3629 | maybe_output_constant_def_contents (struct constant_descriptor_tree *desc, |
3630 | int defer) |
3631 | { |
3632 | rtx symbol = XEXP (desc->rtl, 0); |
3633 | tree exp = desc->value; |
3634 | |
3635 | if (flag_syntax_only) |
3636 | return; |
3637 | |
3638 | if (TREE_ASM_WRITTEN (exp)) |
3639 | /* Already output; don't do it again. */ |
3640 | return; |
3641 | |
3642 | /* We can always defer constants as long as the context allows |
3643 | doing so. */ |
3644 | if (defer) |
3645 | { |
3646 | /* Increment n_deferred_constants if it exists. It needs to be at |
3647 | least as large as the number of constants actually referred to |
3648 | by the function. If it's too small we'll stop looking too early |
3649 | and fail to emit constants; if it's too large we'll only look |
3650 | through the entire function when we could have stopped earlier. */ |
3651 | if (cfun) |
3652 | n_deferred_constants++; |
3653 | return; |
3654 | } |
3655 | |
3656 | output_constant_def_contents (symbol); |
3657 | } |
3658 | |
3659 | /* Subroutine of output_constant_def_contents. Output the definition |
3660 | of constant EXP, which is pointed to by label LABEL. ALIGN is the |
3661 | constant's alignment in bits. */ |
3662 | |
3663 | static void |
3664 | assemble_constant_contents (tree exp, const char *label, unsigned int align, |
3665 | bool merge_strings) |
3666 | { |
3667 | HOST_WIDE_INT size; |
3668 | |
3669 | size = get_constant_size (exp); |
3670 | |
3671 | /* Do any machine/system dependent processing of the constant. */ |
3672 | targetm.asm_out.declare_constant_name (asm_out_file, label, exp, size); |
3673 | |
3674 | /* Output the value of EXP. */ |
3675 | output_constant (exp, size, align, false, merge_strings); |
3676 | |
3677 | targetm.asm_out.decl_end (); |
3678 | } |
3679 | |
3680 | /* We must output the constant data referred to by SYMBOL; do so. */ |
3681 | |
static void
output_constant_def_contents (rtx symbol)
{
  tree decl = SYMBOL_REF_DECL (symbol);
  tree exp = DECL_INITIAL (decl);
  bool asan_protected = false;

  /* Make sure any other constants whose addresses appear in EXP
     are assigned label numbers.  */
  output_addressed_constants (exp, 0);

  /* We are no longer deferring this constant.  */
  TREE_ASM_WRITTEN (decl) = TREE_ASM_WRITTEN (exp) = 1;

  /* Under -fsanitize=address, protected string constants get a trailing
     red zone so overruns can be detected; raise the alignment to the
     red-zone size.  */
  if ((flag_sanitize & SANITIZE_ADDRESS)
      && TREE_CODE (exp) == STRING_CST
      && asan_protect_global (exp))
    {
      asan_protected = true;
      SET_DECL_ALIGN (decl, MAX (DECL_ALIGN (decl),
				 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT));
    }

  /* If the constant is part of an object block, make sure that the
     decl has been positioned within its block, but do not write out
     its definition yet.  output_object_blocks will do that later.  */
  if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol) && SYMBOL_REF_BLOCK (symbol))
    place_block_symbol (symbol);
  else
    {
      int align = (TREE_CODE (decl) == CONST_DECL
		   || (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
		   ? DECL_ALIGN (decl)
		   : symtab_node::get (decl)->definition_alignment ());
      /* Switch to the constant's section and align within it before
	 emitting the data.  */
      section *sect = get_constant_section (exp, align);
      switch_to_section (sect);
      if (align > BITS_PER_UNIT)
	ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
      assemble_constant_contents (exp, XSTR (symbol, 0), align,
				  merge_strings: (sect->common.flags & SECTION_MERGE)
				  && (sect->common.flags & SECTION_STRINGS));
      if (asan_protected)
	{
	  /* Emit the trailing red-zone padding.  */
	  HOST_WIDE_INT size = get_constant_size (exp);
	  assemble_zeros (size: asan_red_zone_size (size));
	}
    }
}
3730 | |
3731 | /* Look up EXP in the table of constant descriptors. Return the rtl |
3732 | if it has been emitted, else null. */ |
3733 | |
3734 | rtx |
3735 | lookup_constant_def (tree exp) |
3736 | { |
3737 | struct constant_descriptor_tree key; |
3738 | |
3739 | key.value = exp; |
3740 | key.hash = const_hash_1 (exp); |
3741 | constant_descriptor_tree *desc |
3742 | = const_desc_htab->find_with_hash (comparable: &key, hash: key.hash); |
3743 | |
3744 | return (desc ? desc->rtl : NULL_RTX); |
3745 | } |
3746 | |
3747 | /* Return a tree representing a reference to constant data in memory |
3748 | for the constant expression EXP. |
3749 | |
3750 | This is the counterpart of output_constant_def at the Tree level. */ |
3751 | |
3752 | tree |
3753 | tree_output_constant_def (tree exp) |
3754 | { |
3755 | struct constant_descriptor_tree *desc = add_constant_to_table (exp, defer: 1); |
3756 | tree decl = SYMBOL_REF_DECL (XEXP (desc->rtl, 0)); |
3757 | varpool_node::finalize_decl (decl); |
3758 | return decl; |
3759 | } |
3760 | |
/* Descriptor for one RTX constant-pool entry.  */
class GTY((chain_next ("%h.next" ), for_user)) constant_descriptor_rtx {
public:
  class constant_descriptor_rtx *next;	/* Next entry, in offset order.  */
  rtx mem;				/* MEM referring to the pool slot.  */
  rtx sym;				/* SYMBOL_REF labeling the slot.  */
  rtx constant;				/* The constant value itself.  */
  HOST_WIDE_INT offset;			/* Byte offset within the pool.  */
  hashval_t hash;			/* Cached hash of CONSTANT.  */
  fixed_size_mode mode;			/* Machine mode of the constant.  */
  unsigned int align;			/* Required alignment, in bits.  */
  int labelno;				/* Internal "LC" label number.  */
  int mark;				/* 0 = unused; > 0 = used; < 0 =
					   emitted as alias of label ~mark
					   (see output_constant_pool_contents).  */
};
3774 | |
/* Hasher for the RTX constant-pool hash tables; hashing returns the
   cached descriptor hash, equality compares mode and constant.  */
struct const_rtx_desc_hasher : ggc_ptr_hash<constant_descriptor_rtx>
{
  static hashval_t hash (constant_descriptor_rtx *);
  static bool equal (constant_descriptor_rtx *, constant_descriptor_rtx *);
};
3780 | |
3781 | /* Used in the hash tables to avoid outputting the same constant |
3782 | twice. Unlike 'struct constant_descriptor_tree', RTX constants |
3783 | are output once per function, not once per file. */ |
3784 | /* ??? Only a few targets need per-function constant pools. Most |
3785 | can use one per-file pool. Should add a targetm bit to tell the |
3786 | difference. */ |
3787 | |
/* A pool of RTX constants awaiting output (per-function, or the
   shared pool when the target uses object blocks).  */
struct GTY(()) rtx_constant_pool {
  /* Pointers to first and last constant in pool, as ordered by offset.  */
  class constant_descriptor_rtx *first;
  class constant_descriptor_rtx *last;

  /* Hash facility for making memory-constants from constant rtl-expressions.
     It is used on RISC machines where immediate integer arguments and
     constant addresses are restricted so that such constants must be stored
     in memory.  */
  hash_table<const_rtx_desc_hasher> *const_rtx_htab;

  /* Current offset in constant pool (does not include any
     machine-specific header).  */
  HOST_WIDE_INT offset;
};
3803 | |
3804 | /* Hash and compare functions for const_rtx_htab. */ |
3805 | |
3806 | hashval_t |
3807 | const_rtx_desc_hasher::hash (constant_descriptor_rtx *desc) |
3808 | { |
3809 | return desc->hash; |
3810 | } |
3811 | |
3812 | bool |
3813 | const_rtx_desc_hasher::equal (constant_descriptor_rtx *x, |
3814 | constant_descriptor_rtx *y) |
3815 | { |
3816 | if (x->mode != y->mode) |
3817 | return false; |
3818 | return rtx_equal_p (x->constant, y->constant); |
3819 | } |
3820 | |
3821 | /* Hash one component of a constant. */ |
3822 | |
static hashval_t
const_rtx_hash_1 (const_rtx x)
{
  unsigned HOST_WIDE_INT hwi;
  machine_mode mode;
  enum rtx_code code;
  hashval_t h;
  int i;

  code = GET_CODE (x);
  mode = GET_MODE (x);
  /* Seed the hash with the rtx code and mode.  */
  h = (hashval_t) code * 1048573 + mode;

  switch (code)
    {
    case CONST_INT:
      hwi = INTVAL (x);

    fold_hwi:
      /* Fold HWI into H by xor-ing successive hashval_t-sized chunks,
	 in case HOST_WIDE_INT is wider than hashval_t.  */
      {
	int shift = sizeof (hashval_t) * CHAR_BIT;
	const int n = sizeof (HOST_WIDE_INT) / sizeof (hashval_t);

	h ^= (hashval_t) hwi;
	for (i = 1; i < n; ++i)
	  {
	    hwi >>= shift;
	    h ^= (hashval_t) hwi;
	  }
      }
      break;

    case CONST_WIDE_INT:
      /* Combine all elements into one HOST_WIDE_INT, then fold it
	 exactly as for CONST_INT.  */
      hwi = 0;
      {
	for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
	  hwi ^= CONST_WIDE_INT_ELT (x, i);
	goto fold_hwi;
      }

    case CONST_DOUBLE:
      if (TARGET_SUPPORTS_WIDE_INT == 0 && mode == VOIDmode)
	{
	  /* Legacy integer CONST_DOUBLE: hash both halves.  */
	  hwi = CONST_DOUBLE_LOW (x) ^ CONST_DOUBLE_HIGH (x);
	  goto fold_hwi;
	}
      else
	h ^= real_hash (CONST_DOUBLE_REAL_VALUE (x));
      break;

    case CONST_FIXED:
      h ^= fixed_hash (CONST_FIXED_VALUE (x));
      break;

    case SYMBOL_REF:
      h ^= htab_hash_string (XSTR (x, 0));
      break;

    case LABEL_REF:
      h = h * 251 + CODE_LABEL_NUMBER (label_ref_label (x));
      break;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      h = h * 251 + XINT (x, 1);
      break;

    default:
      /* Other codes contribute only their code and mode.  */
      break;
    }

  return h;
}
3896 | |
3897 | /* Compute a hash value for X, which should be a constant. */ |
3898 | |
3899 | static hashval_t |
3900 | const_rtx_hash (rtx x) |
3901 | { |
3902 | hashval_t h = 0; |
3903 | subrtx_iterator::array_type array; |
3904 | FOR_EACH_SUBRTX (iter, array, x, ALL) |
3905 | h = h * 509 + const_rtx_hash_1 (x: *iter); |
3906 | return h; |
3907 | } |
3908 | |
3909 | |
3910 | /* Create and return a new rtx constant pool. */ |
3911 | |
3912 | static struct rtx_constant_pool * |
3913 | create_constant_pool (void) |
3914 | { |
3915 | struct rtx_constant_pool *pool; |
3916 | |
3917 | pool = ggc_alloc<rtx_constant_pool> (); |
3918 | pool->const_rtx_htab = hash_table<const_rtx_desc_hasher>::create_ggc (n: 31); |
3919 | pool->first = NULL; |
3920 | pool->last = NULL; |
3921 | pool->offset = 0; |
3922 | return pool; |
3923 | } |
3924 | |
3925 | /* Initialize constant pool hashing for a new function. */ |
3926 | |
3927 | void |
3928 | init_varasm_status (void) |
3929 | { |
3930 | crtl->varasm.pool = create_constant_pool (); |
3931 | crtl->varasm.deferred_constants = 0; |
3932 | } |
3933 | |
3934 | /* Given a MINUS expression, simplify it if both sides |
3935 | include the same symbol. */ |
3936 | |
3937 | rtx |
3938 | simplify_subtraction (rtx x) |
3939 | { |
3940 | rtx r = simplify_rtx (x); |
3941 | return r ? r : x; |
3942 | } |
3943 | |
3944 | /* Given a constant rtx X, make (or find) a memory constant for its value |
3945 | and return a MEM rtx to refer to it in memory. IN_MODE is the mode |
3946 | of X. */ |
3947 | |
rtx
force_const_mem (machine_mode in_mode, rtx x)
{
  class constant_descriptor_rtx *desc, tmp;
  struct rtx_constant_pool *pool;
  char label[256];
  rtx def, symbol;
  hashval_t hash;
  unsigned int align;
  constant_descriptor_rtx **slot;
  fixed_size_mode mode;

  /* We can't force variable-sized objects to memory.  */
  if (!is_a <fixed_size_mode> (m: in_mode, result: &mode))
    return NULL_RTX;

  /* If we're not allowed to drop X into the constant pool, don't.  */
  if (targetm.cannot_force_const_mem (mode, x))
    return NULL_RTX;

  /* Record that this function has used a constant pool entry.  */
  crtl->uses_const_pool = 1;

  /* Decide which pool to use.  */
  pool = (targetm.use_blocks_for_constant_p (mode, x)
	  ? shared_constant_pool
	  : crtl->varasm.pool);

  /* Lookup the value in the hashtable.  */
  tmp.constant = x;
  tmp.mode = mode;
  hash = const_rtx_hash (x);
  slot = pool->const_rtx_htab->find_slot_with_hash (comparable: &tmp, hash, insert: INSERT);
  desc = *slot;

  /* If the constant was already present, return its memory.  */
  if (desc)
    return copy_rtx (desc->mem);

  /* Otherwise, create a new descriptor.  */
  desc = ggc_alloc<constant_descriptor_rtx> ();
  *slot = desc;

  /* Align the location counter as required by EXP's data type.  */
  machine_mode align_mode = (mode == VOIDmode ? word_mode : mode);
  align = targetm.static_rtx_alignment (align_mode);

  /* Round the pool offset up to that alignment.  */
  pool->offset += (align / BITS_PER_UNIT) - 1;
  pool->offset &= ~ ((align / BITS_PER_UNIT) - 1);

  desc->next = NULL;
  desc->constant = copy_rtx (tmp.constant);
  desc->offset = pool->offset;
  desc->hash = hash;
  desc->mode = mode;
  desc->align = align;
  desc->labelno = const_labelno;
  desc->mark = 0;

  /* Reserve space for the constant and append DESC to the pool's
     offset-ordered list.  */
  pool->offset += GET_MODE_SIZE (mode);
  if (pool->last)
    pool->last->next = desc;
  else
    pool->first = pool->last = desc;
  pool->last = desc;

  /* Create a string containing the label name, in LABEL.  */
  ASM_GENERATE_INTERNAL_LABEL (label, "LC" , const_labelno);
  ++const_labelno;

  /* Construct the SYMBOL_REF.  Make sure to mark it as belonging to
     the constants pool.  */
  if (use_object_blocks_p () && targetm.use_blocks_for_constant_p (mode, x))
    {
      section *sect = targetm.asm_out.select_rtx_section (mode, x, align);
      symbol = create_block_symbol (ggc_strdup (label),
				    block: get_block_for_section (sect), offset: -1);
    }
  else
    symbol = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (label));
  desc->sym = symbol;
  SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LOCAL;
  CONSTANT_POOL_ADDRESS_P (symbol) = 1;
  SET_SYMBOL_REF_CONSTANT (symbol, desc);

  /* Construct the MEM.  */
  desc->mem = def = gen_const_mem (mode, symbol);
  set_mem_align (def, align);

  /* If we're dropping a label to the constant pool, make sure we
     don't delete it.  */
  if (GET_CODE (x) == LABEL_REF)
    LABEL_PRESERVE_P (XEXP (x, 0)) = 1;

  return copy_rtx (def);
}
4044 | |
4045 | /* Given a constant pool SYMBOL_REF, return the corresponding constant. */ |
4046 | |
4047 | rtx |
4048 | get_pool_constant (const_rtx addr) |
4049 | { |
4050 | return SYMBOL_REF_CONSTANT (addr)->constant; |
4051 | } |
4052 | |
4053 | /* Given a constant pool SYMBOL_REF, return the corresponding constant |
4054 | and whether it has been output or not. */ |
4055 | |
4056 | rtx |
4057 | get_pool_constant_mark (rtx addr, bool *pmarked) |
4058 | { |
4059 | class constant_descriptor_rtx *desc; |
4060 | |
4061 | desc = SYMBOL_REF_CONSTANT (addr); |
4062 | *pmarked = (desc->mark != 0); |
4063 | return desc->constant; |
4064 | } |
4065 | |
4066 | /* Similar, return the mode. */ |
4067 | |
4068 | fixed_size_mode |
4069 | get_pool_mode (const_rtx addr) |
4070 | { |
4071 | return SYMBOL_REF_CONSTANT (addr)->mode; |
4072 | } |
4073 | |
4074 | /* Return TRUE if and only if the constant pool has no entries. Note |
4075 | that even entries we might end up choosing not to emit are counted |
4076 | here, so there is the potential for missed optimizations. */ |
4077 | |
4078 | bool |
4079 | constant_pool_empty_p (void) |
4080 | { |
4081 | return crtl->varasm.pool->first == NULL; |
4082 | } |
4083 | |
4084 | /* Worker function for output_constant_pool_1. Emit assembly for X |
4085 | in MODE with known alignment ALIGN. */ |
4086 | |
static void
output_constant_pool_2 (fixed_size_mode mode, rtx x, unsigned int align)
{
  switch (GET_MODE_CLASS (mode))
    {
    case MODE_FLOAT:
    case MODE_DECIMAL_FLOAT:
      {
	gcc_assert (CONST_DOUBLE_AS_FLOAT_P (x));
	assemble_real (d: *CONST_DOUBLE_REAL_VALUE (x),
		       mode: as_a <scalar_float_mode> (m: mode), align, reverse: false);
	break;
      }

    case MODE_INT:
    case MODE_PARTIAL_INT:
    case MODE_FRACT:
    case MODE_UFRACT:
    case MODE_ACCUM:
    case MODE_UACCUM:
      assemble_integer (x, size: GET_MODE_SIZE (mode), align, force: 1);
      break;

    case MODE_VECTOR_BOOL:
      {
	gcc_assert (GET_CODE (x) == CONST_VECTOR);

	/* Pick the smallest integer mode that contains at least one
	   whole element.  Often this is byte_mode and contains more
	   than one element.  */
	unsigned int nelts = GET_MODE_NUNITS (mode);
	unsigned int elt_bits = GET_MODE_PRECISION (mode) / nelts;
	unsigned int int_bits = MAX (elt_bits, BITS_PER_UNIT);
	scalar_int_mode int_mode = int_mode_for_size (size: int_bits, limit: 0).require ();
	unsigned int mask = GET_MODE_MASK (GET_MODE_INNER (mode));

	/* We allow GET_MODE_PRECISION (mode) <= GET_MODE_BITSIZE (mode) but
	   only properly handle cases where the difference is less than a
	   byte.  */
	gcc_assert (GET_MODE_BITSIZE (mode) - GET_MODE_PRECISION (mode) <
		    BITS_PER_UNIT);

	/* Build the constant up one integer at a time.  */
	unsigned int elts_per_int = int_bits / elt_bits;
	for (unsigned int i = 0; i < nelts; i += elts_per_int)
	  {
	    unsigned HOST_WIDE_INT value = 0;
	    unsigned int limit = MIN (nelts - i, elts_per_int);
	    /* Pack up to LIMIT boolean elements into VALUE.  */
	    for (unsigned int j = 0; j < limit; ++j)
	      {
		auto elt = INTVAL (CONST_VECTOR_ELT (x, i + j));
		value |= (elt & mask) << (j * elt_bits);
	      }
	    /* Recurse to emit the chunk as a plain integer; after the
	       first chunk the known alignment drops to the chunk size.  */
	    output_constant_pool_2 (mode: int_mode, x: gen_int_mode (value, int_mode),
				    align: i != 0 ? MIN (align, int_bits) : align);
	  }
	break;
      }
    case MODE_VECTOR_FLOAT:
    case MODE_VECTOR_INT:
    case MODE_VECTOR_FRACT:
    case MODE_VECTOR_UFRACT:
    case MODE_VECTOR_ACCUM:
    case MODE_VECTOR_UACCUM:
      {
	int i, units;
	scalar_mode submode = GET_MODE_INNER (mode);
	unsigned int subalign = MIN (align, GET_MODE_BITSIZE (submode));

	gcc_assert (GET_CODE (x) == CONST_VECTOR);
	units = GET_MODE_NUNITS (mode);

	/* Recurse once per vector element; only the first element
	   carries the full alignment.  */
	for (i = 0; i < units; i++)
	  {
	    rtx elt = CONST_VECTOR_ELT (x, i);
	    output_constant_pool_2 (mode: submode, x: elt, align: i ? subalign : align);
	  }
      }
      break;

    default:
      gcc_unreachable ();
    }
}
4171 | |
4172 | /* Worker function for output_constant_pool. Emit constant DESC, |
4173 | giving it ALIGN bits of alignment. */ |
4174 | |
static void
output_constant_pool_1 (class constant_descriptor_rtx *desc,
			unsigned int align)
{
  rtx x, tmp;

  x = desc->constant;

  /* See if X is a LABEL_REF (or a CONST referring to a LABEL_REF)
     whose CODE_LABEL has been deleted.  This can occur if a jump table
     is eliminated by optimization.  If so, write a constant of zero
     instead.  Note that this can also happen by turning the
     CODE_LABEL into a NOTE.  */
  /* ??? This seems completely and utterly wrong.  Certainly it's
     not true for NOTE_INSN_DELETED_LABEL, but I disbelieve proper
     functioning even with rtx_insn::deleted and friends.  */

  tmp = x;
  switch (GET_CODE (tmp))
    {
    case CONST:
      /* Only (const (plus (label_ref ...) ...)) is of interest here.  */
      if (GET_CODE (XEXP (tmp, 0)) != PLUS
	  || GET_CODE (XEXP (XEXP (tmp, 0), 0)) != LABEL_REF)
	break;
      tmp = XEXP (XEXP (tmp, 0), 0);
      /* FALLTHRU  */

    case LABEL_REF:
      {
	/* Check the referenced label still exists.  */
	rtx_insn *insn = label_ref_label (ref: tmp);
	gcc_assert (!insn->deleted ());
	gcc_assert (!NOTE_P (insn)
		    || NOTE_KIND (insn) != NOTE_INSN_DELETED);
	break;
      }

    default:
      break;
    }

#ifdef ASM_OUTPUT_SPECIAL_POOL_ENTRY
  /* The target may emit the whole entry itself and jump to DONE.  */
  ASM_OUTPUT_SPECIAL_POOL_ENTRY (asm_out_file, x, desc->mode,
				 align, desc->labelno, done);
#endif

  assemble_align (align);

  /* Output the label.  */
  targetm.asm_out.internal_label (asm_out_file, "LC" , desc->labelno);

  /* Output the data.
     Pass actual alignment value while emitting string constant to asm code
     as function 'output_constant_pool_1' explicitly passes the alignment as 1
     assuming that the data is already aligned which prevents the generation
     of fix-up table entries.  */
  output_constant_pool_2 (mode: desc->mode, x, align: desc->align);

  /* Make sure all constants in SECTION_MERGE and not SECTION_STRINGS
     sections have proper size.  */
  if (align > GET_MODE_BITSIZE (mode: desc->mode)
      && in_section
      && (in_section->common.flags & SECTION_MERGE))
    assemble_align (align);

#ifdef ASM_OUTPUT_SPECIAL_POOL_ENTRY
 done:
#endif
  return;
}
4244 | |
4245 | /* Recompute the offsets of entries in POOL, and the overall size of |
4246 | POOL. Do this after calling mark_constant_pool to ensure that we |
4247 | are computing the offset values for the pool which we will actually |
4248 | emit. */ |
4249 | |
4250 | static void |
4251 | recompute_pool_offsets (struct rtx_constant_pool *pool) |
4252 | { |
4253 | class constant_descriptor_rtx *desc; |
4254 | pool->offset = 0; |
4255 | |
4256 | for (desc = pool->first; desc ; desc = desc->next) |
4257 | if (desc->mark) |
4258 | { |
4259 | /* Recalculate offset. */ |
4260 | unsigned int align = desc->align; |
4261 | pool->offset += (align / BITS_PER_UNIT) - 1; |
4262 | pool->offset &= ~ ((align / BITS_PER_UNIT) - 1); |
4263 | desc->offset = pool->offset; |
4264 | pool->offset += GET_MODE_SIZE (mode: desc->mode); |
4265 | } |
4266 | } |
4267 | |
4268 | /* Mark all constants that are referenced by SYMBOL_REFs in X. |
4269 | Emit referenced deferred strings. */ |
4270 | |
static void
mark_constants_in_pattern (rtx insn)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
    {
      const_rtx x = *iter;
      if (GET_CODE (x) == SYMBOL_REF)
	{
	  if (CONSTANT_POOL_ADDRESS_P (x))
	    {
	      /* An RTX pool entry: mark it used, then continue the walk
		 inside the constant itself so entries referenced only
		 from other constants are also marked.  */
	      class constant_descriptor_rtx *desc = SYMBOL_REF_CONSTANT (x);
	      if (desc->mark == 0)
		{
		  desc->mark = 1;
		  iter.substitute (x: desc->constant);
		}
	    }
	  else if (TREE_CONSTANT_POOL_ADDRESS_P (x))
	    {
	      /* A deferred tree constant: emit it now if it has not
		 been written out yet.  */
	      tree decl = SYMBOL_REF_DECL (x);
	      if (!TREE_ASM_WRITTEN (DECL_INITIAL (decl)))
		{
		  n_deferred_constants--;
		  output_constant_def_contents (CONST_CAST_RTX (x));
		}
	    }
	}
    }
}
4301 | |
4302 | /* Look through appropriate parts of INSN, marking all entries in the |
4303 | constant pool which are actually being used. Entries that are only |
4304 | referenced by other constants are also marked as used. Emit |
4305 | deferred strings that are used. */ |
4306 | |
4307 | static void |
4308 | mark_constants (rtx_insn *insn) |
4309 | { |
4310 | if (!INSN_P (insn)) |
4311 | return; |
4312 | |
4313 | /* Insns may appear inside a SEQUENCE. Only check the patterns of |
4314 | insns, not any notes that may be attached. We don't want to mark |
4315 | a constant just because it happens to appear in a REG_EQUIV note. */ |
4316 | if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (p: PATTERN (insn))) |
4317 | { |
4318 | int i, n = seq->len (); |
4319 | for (i = 0; i < n; ++i) |
4320 | { |
4321 | rtx subinsn = seq->element (index: i); |
4322 | if (INSN_P (subinsn)) |
4323 | mark_constants_in_pattern (insn: subinsn); |
4324 | } |
4325 | } |
4326 | else |
4327 | mark_constants_in_pattern (insn); |
4328 | } |
4329 | |
4330 | /* Look through the instructions for this function, and mark all the |
4331 | entries in POOL which are actually being used. Emit deferred constants |
4332 | which have indeed been used. */ |
4333 | |
4334 | static void |
4335 | mark_constant_pool (void) |
4336 | { |
4337 | rtx_insn *insn; |
4338 | |
4339 | if (!crtl->uses_const_pool && n_deferred_constants == 0) |
4340 | return; |
4341 | |
4342 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) |
4343 | mark_constants (insn); |
4344 | } |
4345 | |
4346 | /* Write all the constants in POOL. */ |
4347 | |
static void
output_constant_pool_contents (struct rtx_constant_pool *pool)
{
  class constant_descriptor_rtx *desc;

  for (desc = pool->first; desc ; desc = desc->next)
    if (desc->mark < 0)
      {
	/* A negative mark means this entry is emitted as an alias of
	   internal label "LC" ~mark, offset by the entry's position.  */
#ifdef ASM_OUTPUT_DEF
	gcc_checking_assert (TARGET_SUPPORTS_ALIASES);

	const char *name = XSTR (desc->sym, 0);
	char label[256];
	char buffer[256 + 32];
	const char *p;

	ASM_GENERATE_INTERNAL_LABEL (label, "LC" , ~desc->mark);
	p = label;
	if (desc->offset)
	  {
	    sprintf (s: buffer, format: "%s+" HOST_WIDE_INT_PRINT_DEC, p, desc->offset);
	    p = buffer;
	  }
	ASM_OUTPUT_DEF (asm_out_file, name, p);
#else
	gcc_unreachable ();
#endif
      }
    else if (desc->mark)
      {
	/* If the constant is part of an object_block, make sure that
	   the constant has been positioned within its block, but do not
	   write out its definition yet.  output_object_blocks will do
	   that later.  */
	if (SYMBOL_REF_HAS_BLOCK_INFO_P (desc->sym)
	    && SYMBOL_REF_BLOCK (desc->sym))
	  place_block_symbol (desc->sym);
	else
	  {
	    switch_to_section (targetm.asm_out.select_rtx_section
			       (desc->mode, desc->constant, desc->align));
	    output_constant_pool_1 (desc, align: desc->align);
	  }
      }
}
4393 | |
/* Byte-level view of one pool entry, used when trying to alias scalar
   constants with the bytes of other (e.g. vector) constants.  */
struct constant_descriptor_rtx_data {
  constant_descriptor_rtx *desc;	/* Underlying pool entry.  */
  target_unit *bytes;			/* Encoded bytes of the constant.  */
  unsigned short size;			/* Number of target_units.  */
  unsigned short offset;		/* Byte offset within DESC's data.  */
  unsigned int hash;			/* Hash; unique per record (see
					   constant_descriptor_rtx_data_cmp).  */
};
4401 | |
4402 | /* qsort callback to sort constant_descriptor_rtx_data * vector by |
4403 | decreasing size. */ |
4404 | |
4405 | static int |
4406 | constant_descriptor_rtx_data_cmp (const void *p1, const void *p2) |
4407 | { |
4408 | constant_descriptor_rtx_data *const data1 |
4409 | = *(constant_descriptor_rtx_data * const *) p1; |
4410 | constant_descriptor_rtx_data *const data2 |
4411 | = *(constant_descriptor_rtx_data * const *) p2; |
4412 | if (data1->size > data2->size) |
4413 | return -1; |
4414 | if (data1->size < data2->size) |
4415 | return 1; |
4416 | if (data1->hash < data2->hash) |
4417 | return -1; |
4418 | gcc_assert (data1->hash > data2->hash); |
4419 | return 1; |
4420 | } |
4421 | |
/* Hasher for the byte-level constant records used by
   optimize_constant_pool.  */
struct const_rtx_data_hasher : nofree_ptr_hash<constant_descriptor_rtx_data>
{
  static hashval_t hash (constant_descriptor_rtx_data *);
  static bool equal (constant_descriptor_rtx_data *,
		     constant_descriptor_rtx_data *);
};
4428 | |
4429 | /* Hash and compare functions for const_rtx_data_htab. */ |
4430 | |
4431 | hashval_t |
4432 | const_rtx_data_hasher::hash (constant_descriptor_rtx_data *data) |
4433 | { |
4434 | return data->hash; |
4435 | } |
4436 | |
bool
const_rtx_data_hasher::equal (constant_descriptor_rtx_data *x,
			      constant_descriptor_rtx_data *y)
{
  /* Cheap rejections first: hash and byte count must match.  */
  if (x->hash != y->hash || x->size != y->size)
    return false;
  unsigned int align1 = x->desc->align;
  unsigned int align2 = y->desc->align;
  /* The effective alignment of each span is limited by its bit offset
     within the containing constant.  */
  unsigned int offset1 = (x->offset * BITS_PER_UNIT) & (align1 - 1);
  unsigned int offset2 = (y->offset * BITS_PER_UNIT) & (align2 - 1);
  if (offset1)
    align1 = least_bit_hwi (x: offset1);
  if (offset2)
    align2 = least_bit_hwi (x: offset2);
  /* Y may only alias X's bytes if X is at least as aligned as Y.  */
  if (align2 > align1)
    return false;
  /* Finally compare the encoded bytes themselves.  */
  if (memcmp (s1: x->bytes, s2: y->bytes, n: x->size * sizeof (target_unit)) != 0)
    return false;
  return true;
}
4457 | |
/* Attempt to optimize constant pool POOL.  If it contains both CONST_VECTOR
   constants and scalar constants with the values of CONST_VECTOR elements,
   try to alias the scalar constants with the CONST_VECTOR elements.  */

static void
optimize_constant_pool (struct rtx_constant_pool *pool)
{
  auto_vec<target_unit, 128> buffer;
  auto_vec<constant_descriptor_rtx_data *, 128> vec;
  object_allocator<constant_descriptor_rtx_data>
    data_pool ("constant_descriptor_rtx_data_pool" );
  int idx = 0;
  size_t size = 0;
  /* First pass: collect a descriptor for every marked, non-block pool
     constant that native_encode_rtx can turn into target bytes.  */
  for (constant_descriptor_rtx *desc = pool->first; desc; desc = desc->next)
    if (desc->mark > 0
	&& ! (SYMBOL_REF_HAS_BLOCK_INFO_P (desc->sym)
	      && SYMBOL_REF_BLOCK (desc->sym)))
      {
	buffer.truncate (size: 0);
	buffer.reserve (nelems: GET_MODE_SIZE (mode: desc->mode));
	if (native_encode_rtx (desc->mode, desc->constant, buffer, 0,
			       GET_MODE_SIZE (mode: desc->mode)))
	  {
	    constant_descriptor_rtx_data *data = data_pool.allocate ();
	    data->desc = desc;
	    data->bytes = NULL;
	    data->size = GET_MODE_SIZE (mode: desc->mode);
	    data->offset = 0;
	    /* Temporarily use the hash field as a unique sort index;
	       the real hash is computed in the second pass.  */
	    data->hash = idx++;
	    size += data->size;
	    vec.safe_push (obj: data);
	  }
      }
  if (idx)
    {
      /* Sort by decreasing size, so larger constants enter the hash table
	 before the smaller constants they might contain.  */
      vec.qsort (constant_descriptor_rtx_data_cmp);
      unsigned min_size = vec.last ()->size;
      target_unit *bytes = XNEWVEC (target_unit, size);
      unsigned int i;
      constant_descriptor_rtx_data *data;
      hash_table<const_rtx_data_hasher> * htab
	= new hash_table<const_rtx_data_hasher> (31);
      size = 0;
      /* Second pass: encode each constant into the shared byte buffer,
	 then look it up among the byte sequences seen so far.  */
      FOR_EACH_VEC_ELT (vec, i, data)
	{
	  buffer.truncate (size: 0);
	  native_encode_rtx (data->desc->mode, data->desc->constant,
			     buffer, 0, data->size);
	  memcpy (dest: bytes + size, src: buffer.address (), n: data->size);
	  data->bytes = bytes + size;
	  data->hash = iterative_hash (data->bytes,
				       data->size * sizeof (target_unit), 0);
	  size += data->size;
	  constant_descriptor_rtx_data **slot
	    = htab->find_slot_with_hash (comparable: data, hash: data->hash, insert: INSERT);
	  if (*slot)
	    {
	      /* Identical bytes already exist: record the alias by storing
		 the complement of the aliased entry's label number in MARK
		 and reusing that entry's offset.  */
	      data->desc->mark = ~(*slot)->desc->labelno;
	      data->desc->offset = (*slot)->offset;
	    }
	  else
	    {
	      /* New contents: besides the whole constant, enter all its
		 power-of-two sized, naturally positioned sub-sequences no
		 smaller than the smallest constant in the pool, so later
		 constants can alias parts of this one.  */
	      unsigned int sz = 1 << floor_log2 (x: data->size);

	      *slot = data;
	      for (sz >>= 1; sz >= min_size; sz >>= 1)
		for (unsigned off = 0; off + sz <= data->size; off += sz)
		  {
		    constant_descriptor_rtx_data tmp;
		    tmp.desc = data->desc;
		    tmp.bytes = data->bytes + off;
		    tmp.size = sz;
		    tmp.offset = off;
		    tmp.hash = iterative_hash (tmp.bytes,
					       sz * sizeof (target_unit), 0);
		    slot = htab->find_slot_with_hash (comparable: &tmp, hash: tmp.hash, insert: INSERT);
		    if (*slot == NULL)
		      {
			*slot = data_pool.allocate ();
			**slot = tmp;
		      }
		  }
	    }
	}
      delete htab;
      XDELETE (bytes);
    }
  data_pool.release ();
}
4547 | |
/* Mark all constants that are used in the current function, then write
   out the function's private constant pool.  FNNAME and FNDECL identify
   the function; they are only consumed by the optional target macros.  */

static void
output_constant_pool (const char *fnname ATTRIBUTE_UNUSED,
		      tree fndecl ATTRIBUTE_UNUSED)
{
  struct rtx_constant_pool *pool = crtl->varasm.pool;

  /* It is possible for gcc to call force_const_mem and then to later
     discard the instructions which refer to the constant.  In such a
     case we do not need to output the constant.  */
  mark_constant_pool ();

  /* Having marked the constant pool entries we'll actually emit, we
     now need to rebuild the offset information, which may have become
     stale.  */
  recompute_pool_offsets (pool);

  /* Targets may define hooks that emit assembler text before and after
     the pool contents.  */
#ifdef ASM_OUTPUT_POOL_PROLOGUE
  ASM_OUTPUT_POOL_PROLOGUE (asm_out_file, fnname, fndecl, pool->offset);
#endif

  output_constant_pool_contents (pool);

#ifdef ASM_OUTPUT_POOL_EPILOGUE
  ASM_OUTPUT_POOL_EPILOGUE (asm_out_file, fnname, fndecl, pool->offset);
#endif
}
4577 | |
/* Write the contents of the shared constant pool.  */

void
output_shared_constant_pool (void)
{
  /* Aliasing duplicate/overlapping pool entries requires assembler alias
     support from the target and is only worthwhile when optimizing.  */
  if (optimize
      && TARGET_SUPPORTS_ALIASES)
    optimize_constant_pool (pool: shared_constant_pool);

  output_constant_pool_contents (pool: shared_constant_pool);
}
4589 | |
/* Determine what kind of relocations EXP may need.  The result is a
   mask: bit 0 (value 1) is set for references to symbols that bind
   locally, bit 1 (value 2) for references to symbols that do not.  */

int
compute_reloc_for_constant (tree exp)
{
  int reloc = 0, reloc2;
  tree tem;

  switch (TREE_CODE (exp))
    {
    case ADDR_EXPR:
    case FDESC_EXPR:
      /* Go inside any operations that get_inner_reference can handle and see
	 if what's inside is a constant: no need to do anything here for
	 addresses of variables or functions.  */
      for (tem = TREE_OPERAND (exp, 0); handled_component_p (t: tem);
	   tem = TREE_OPERAND (tem, 0))
	;

      /* &MEM[&x + c] needs whatever relocations &x needs.  */
      if (TREE_CODE (tem) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR)
	{
	  reloc = compute_reloc_for_constant (TREE_OPERAND (tem, 0));
	  break;
	}

      if (!targetm.binds_local_p (tem))
	reloc |= 2;
      else
	reloc |= 1;
      break;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      /* A sum needs the union of both operands' relocations.  */
      reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
      reloc |= compute_reloc_for_constant (TREE_OPERAND (exp, 1));
      break;

    case MINUS_EXPR:
      reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
      reloc2 = compute_reloc_for_constant (TREE_OPERAND (exp, 1));
      /* The difference of two local labels is computable at link time.  */
      if (reloc == 1 && reloc2 == 1)
	reloc = 0;
      else
	reloc |= reloc2;
      break;

    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
      /* Conversions do not change what relocations the operand needs.  */
      reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
      break;

    case CONSTRUCTOR:
      {
	/* An aggregate needs the union of its elements' relocations.  */
	unsigned HOST_WIDE_INT idx;
	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, tem)
	  if (tem != 0)
	    reloc |= compute_reloc_for_constant (exp: tem);
      }
      break;

    default:
      break;
    }
  return reloc;
}
4657 | |
/* Find all the constants whose addresses are referenced inside of EXP,
   and make sure assembler code with a label has been output for each one.
   DEFER is forwarded to output_constant_def.  */

static void
output_addressed_constants (tree exp, int defer)
{
  tree tem;

  switch (TREE_CODE (exp))
    {
    case ADDR_EXPR:
    case FDESC_EXPR:
      /* Go inside any operations that get_inner_reference can handle and see
	 if what's inside is a constant: no need to do anything here for
	 addresses of variables or functions.  */
      for (tem = TREE_OPERAND (exp, 0); handled_component_p (t: tem);
	   tem = TREE_OPERAND (tem, 0))
	;

      /* If we have an initialized CONST_DECL, retrieve the initializer.  */
      if (TREE_CODE (tem) == CONST_DECL && DECL_INITIAL (tem))
	tem = DECL_INITIAL (tem);

      if (CONSTANT_CLASS_P (tem) || TREE_CODE (tem) == CONSTRUCTOR)
	output_constant_def (exp: tem, defer);

      /* The MEM_REF base may itself contain an addressed constant.  */
      if (TREE_CODE (tem) == MEM_REF)
	output_addressed_constants (TREE_OPERAND (tem, 0), defer);
      break;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case MINUS_EXPR:
      /* Handle the second operand here, then fall through to handle the
	 first like a conversion.  */
      output_addressed_constants (TREE_OPERAND (exp, 1), defer);
      gcc_fallthrough ();

    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
      output_addressed_constants (TREE_OPERAND (exp, 0), defer);
      break;

    case CONSTRUCTOR:
      {
	/* Recurse into every element of the aggregate.  */
	unsigned HOST_WIDE_INT idx;
	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, tem)
	  if (tem != 0)
	    output_addressed_constants (exp: tem, defer);
      }
      break;

    default:
      break;
    }
}
4713 | |
4714 | /* Whether a constructor CTOR is a valid static constant initializer if all |
4715 | its elements are. This used to be internal to initializer_constant_valid_p |
4716 | and has been exposed to let other functions like categorize_ctor_elements |
4717 | evaluate the property while walking a constructor for other purposes. */ |
4718 | |
4719 | bool |
4720 | constructor_static_from_elts_p (const_tree ctor) |
4721 | { |
4722 | return (TREE_CONSTANT (ctor) |
4723 | && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE |
4724 | || TREE_CODE (TREE_TYPE (ctor)) == RECORD_TYPE |
4725 | || TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)); |
4726 | } |
4727 | |
4728 | static tree initializer_constant_valid_p_1 (tree value, tree endtype, |
4729 | tree *cache); |
4730 | |
4731 | /* A subroutine of initializer_constant_valid_p. VALUE is a MINUS_EXPR, |
4732 | PLUS_EXPR or POINTER_PLUS_EXPR. This looks for cases of VALUE |
4733 | which are valid when ENDTYPE is an integer of any size; in |
4734 | particular, this does not accept a pointer minus a constant. This |
4735 | returns null_pointer_node if the VALUE is an absolute constant |
4736 | which can be used to initialize a static variable. Otherwise it |
4737 | returns NULL. */ |
4738 | |
4739 | static tree |
4740 | narrowing_initializer_constant_valid_p (tree value, tree endtype, tree *cache) |
4741 | { |
4742 | tree op0, op1; |
4743 | |
4744 | if (!INTEGRAL_TYPE_P (endtype)) |
4745 | return NULL_TREE; |
4746 | |
4747 | op0 = TREE_OPERAND (value, 0); |
4748 | op1 = TREE_OPERAND (value, 1); |
4749 | |
4750 | /* Like STRIP_NOPS except allow the operand mode to widen. This |
4751 | works around a feature of fold that simplifies (int)(p1 - p2) to |
4752 | ((int)p1 - (int)p2) under the theory that the narrower operation |
4753 | is cheaper. */ |
4754 | |
4755 | while (CONVERT_EXPR_P (op0) |
4756 | || TREE_CODE (op0) == NON_LVALUE_EXPR) |
4757 | { |
4758 | tree inner = TREE_OPERAND (op0, 0); |
4759 | if (inner == error_mark_node |
4760 | || ! INTEGRAL_TYPE_P (TREE_TYPE (op0)) |
4761 | || ! SCALAR_INT_MODE_P (TYPE_MODE (TREE_TYPE (op0))) |
4762 | || ! INTEGRAL_TYPE_P (TREE_TYPE (inner)) |
4763 | || ! SCALAR_INT_MODE_P (TYPE_MODE (TREE_TYPE (inner))) |
4764 | || (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (op0))) |
4765 | > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (inner))))) |
4766 | break; |
4767 | op0 = inner; |
4768 | } |
4769 | |
4770 | while (CONVERT_EXPR_P (op1) |
4771 | || TREE_CODE (op1) == NON_LVALUE_EXPR) |
4772 | { |
4773 | tree inner = TREE_OPERAND (op1, 0); |
4774 | if (inner == error_mark_node |
4775 | || ! INTEGRAL_TYPE_P (TREE_TYPE (op1)) |
4776 | || ! SCALAR_INT_MODE_P (TYPE_MODE (TREE_TYPE (op1))) |
4777 | || ! INTEGRAL_TYPE_P (TREE_TYPE (inner)) |
4778 | || ! SCALAR_INT_MODE_P (TYPE_MODE (TREE_TYPE (inner))) |
4779 | || (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (op1))) |
4780 | > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (inner))))) |
4781 | break; |
4782 | op1 = inner; |
4783 | } |
4784 | |
4785 | op0 = initializer_constant_valid_p_1 (value: op0, endtype, cache); |
4786 | if (!op0) |
4787 | return NULL_TREE; |
4788 | |
4789 | op1 = initializer_constant_valid_p_1 (value: op1, endtype, |
4790 | cache: cache ? cache + 2 : NULL); |
4791 | /* Both initializers must be known. */ |
4792 | if (op1) |
4793 | { |
4794 | if (op0 == op1 |
4795 | && (op0 == null_pointer_node |
4796 | || TREE_CODE (value) == MINUS_EXPR)) |
4797 | return null_pointer_node; |
4798 | |
4799 | /* Support differences between labels. */ |
4800 | if (TREE_CODE (op0) == LABEL_DECL |
4801 | && TREE_CODE (op1) == LABEL_DECL) |
4802 | return null_pointer_node; |
4803 | |
4804 | if (TREE_CODE (op0) == STRING_CST |
4805 | && TREE_CODE (op1) == STRING_CST |
4806 | && operand_equal_p (op0, op1, flags: 1)) |
4807 | return null_pointer_node; |
4808 | } |
4809 | |
4810 | return NULL_TREE; |
4811 | } |
4812 | |
/* Helper function of initializer_constant_valid_p.
   Return nonzero if VALUE is a valid constant-valued expression
   for use in initializing a static variable; one that can be an
   element of a "constant" initializer.

   Return null_pointer_node if the value is absolute;
   if it is relocatable, return the variable that determines the relocation.
   We assume that VALUE has been folded as much as possible;
   therefore, we do not need to check for such things as
   arithmetic-combinations of integers.

   Use CACHE (pointer to 2 tree values) for caching if non-NULL: CACHE[0]
   holds the last VALUE looked up and CACHE[1] the cached answer.  */

static tree
initializer_constant_valid_p_1 (tree value, tree endtype, tree *cache)
{
  tree ret;

  switch (TREE_CODE (value))
    {
    case CONSTRUCTOR:
      if (constructor_static_from_elts_p (ctor: value))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree elt;
	  bool absolute = true;

	  /* Memoized answer for this very constructor?  */
	  if (cache && cache[0] == value)
	    return cache[1];
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (value), idx, elt)
	    {
	      tree reloc;
	      reloc = initializer_constant_valid_p_1 (value: elt, TREE_TYPE (elt),
						      NULL);
	      if (!reloc
		  /* An absolute value is required with reverse SSO.  */
		  || (reloc != null_pointer_node
		      && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (value))
		      && !AGGREGATE_TYPE_P (TREE_TYPE (elt))))
		{
		  if (cache)
		    {
		      cache[0] = value;
		      cache[1] = NULL_TREE;
		    }
		  return NULL_TREE;
		}
	      if (reloc != null_pointer_node)
		absolute = false;
	    }
	  /* For a non-absolute relocation, there is no single
	     variable that can be "the variable that determines the
	     relocation."  */
	  if (cache)
	    {
	      cache[0] = value;
	      cache[1] = absolute ? null_pointer_node : error_mark_node;
	    }
	  return absolute ? null_pointer_node : error_mark_node;
	}

      return TREE_STATIC (value) ? null_pointer_node : NULL_TREE;

    case INTEGER_CST:
    case VECTOR_CST:
    case REAL_CST:
    case FIXED_CST:
    case STRING_CST:
    case COMPLEX_CST:
      /* Literal constants are always absolute.  */
      return null_pointer_node;

    case ADDR_EXPR:
    case FDESC_EXPR:
      {
	/* staticp returns the static base object, or NULL_TREE if the
	   operand is not statically allocated.  */
	tree op0 = staticp (TREE_OPERAND (value, 0));
	if (op0)
	  {
	    /* "&(*a).f" is like unto pointer arithmetic.  If "a" turns out
	       to be a constant, this is old-skool offsetof-like nonsense.  */
	    if (TREE_CODE (op0) == INDIRECT_REF
		&& TREE_CONSTANT (TREE_OPERAND (op0, 0)))
	      return null_pointer_node;
	    /* Taking the address of a nested function involves a trampoline,
	       unless we don't need or want one.  */
	    if (TREE_CODE (op0) == FUNCTION_DECL
		&& DECL_STATIC_CHAIN (op0)
		&& !TREE_NO_TRAMPOLINE (value))
	      return NULL_TREE;
	    /* "&{...}" requires a temporary to hold the constructed
	       object.  */
	    if (TREE_CODE (op0) == CONSTRUCTOR)
	      return NULL_TREE;
	  }
	return op0;
      }

    case NON_LVALUE_EXPR:
      /* Transparent wrapper: look through it.  */
      return initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
					     endtype, cache);

    case VIEW_CONVERT_EXPR:
      {
	tree src = TREE_OPERAND (value, 0);
	tree src_type = TREE_TYPE (src);
	tree dest_type = TREE_TYPE (value);

	/* Allow view-conversions from aggregate to non-aggregate type only
	   if the bit pattern is fully preserved afterwards; otherwise, the
	   RTL expander won't be able to apply a subsequent transformation
	   to the underlying constructor.  */
	if (AGGREGATE_TYPE_P (src_type) && !AGGREGATE_TYPE_P (dest_type))
	  {
	    if (TYPE_MODE (endtype) == TYPE_MODE (dest_type))
	      return initializer_constant_valid_p_1 (value: src, endtype, cache);
	    else
	      return NULL_TREE;
	  }

	/* Allow all other kinds of view-conversion.  */
	return initializer_constant_valid_p_1 (value: src, endtype, cache);
      }

    CASE_CONVERT:
      {
	tree src = TREE_OPERAND (value, 0);
	tree src_type = TREE_TYPE (src);
	tree dest_type = TREE_TYPE (value);

	/* Allow conversions between pointer types and offset types.  */
	if ((POINTER_TYPE_P (dest_type) && POINTER_TYPE_P (src_type))
	    || (TREE_CODE (dest_type) == OFFSET_TYPE
		&& TREE_CODE (src_type) == OFFSET_TYPE))
	  return initializer_constant_valid_p_1 (value: src, endtype, cache);

	/* Allow length-preserving conversions between integer types and
	   floating-point types.  */
	if (((INTEGRAL_TYPE_P (dest_type) && INTEGRAL_TYPE_P (src_type))
	     || (SCALAR_FLOAT_TYPE_P (dest_type)
		 && SCALAR_FLOAT_TYPE_P (src_type)))
	    && (TYPE_PRECISION (dest_type) == TYPE_PRECISION (src_type)))
	  return initializer_constant_valid_p_1 (value: src, endtype, cache);

	/* Allow conversions between other integer types only if
	   explicit value.  Don't allow sign-extension to a type larger
	   than word and pointer, there aren't relocations that would
	   allow to sign extend it to a wider type.  */
	if (INTEGRAL_TYPE_P (dest_type)
	    && INTEGRAL_TYPE_P (src_type)
	    && (TYPE_UNSIGNED (src_type)
		|| TYPE_PRECISION (dest_type) <= TYPE_PRECISION (src_type)
		|| TYPE_PRECISION (dest_type) <= BITS_PER_WORD
		|| TYPE_PRECISION (dest_type) <= POINTER_SIZE))
	  {
	    /* Only an absolute inner value survives this conversion;
	       a relocatable one falls through to the final NULL_TREE.  */
	    tree inner = initializer_constant_valid_p_1 (value: src, endtype, cache);
	    if (inner == null_pointer_node)
	      return null_pointer_node;
	    break;
	  }

	/* Allow (int) &foo provided int is as wide as a pointer.  */
	if (INTEGRAL_TYPE_P (dest_type) && POINTER_TYPE_P (src_type)
	    && (TYPE_PRECISION (dest_type) >= TYPE_PRECISION (src_type)))
	  return initializer_constant_valid_p_1 (value: src, endtype, cache);

	/* Likewise conversions from int to pointers, but also allow
	   conversions from 0.  */
	if ((POINTER_TYPE_P (dest_type)
	     || TREE_CODE (dest_type) == OFFSET_TYPE)
	    && INTEGRAL_TYPE_P (src_type))
	  {
	    if (TREE_CODE (src) == INTEGER_CST
		&& TYPE_PRECISION (dest_type) >= TYPE_PRECISION (src_type))
	      return null_pointer_node;
	    if (integer_zerop (src))
	      return null_pointer_node;
	    else if (TYPE_PRECISION (dest_type) <= TYPE_PRECISION (src_type))
	      return initializer_constant_valid_p_1 (value: src, endtype, cache);
	  }

	/* Allow conversions to struct or union types if the value
	   inside is okay.  */
	if (TREE_CODE (dest_type) == RECORD_TYPE
	    || TREE_CODE (dest_type) == UNION_TYPE)
	  return initializer_constant_valid_p_1 (value: src, endtype, cache);
      }
      break;

    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      /* Any valid floating-point constants will have been folded by now;
	 with -frounding-math we hit this with addition of two constants.  */
      if (TREE_CODE (endtype) == REAL_TYPE)
	return NULL_TREE;
      if (cache && cache[0] == value)
	return cache[1];
      if (! INTEGRAL_TYPE_P (endtype)
	  || ! INTEGRAL_TYPE_P (TREE_TYPE (value))
	  || TYPE_PRECISION (endtype) >= TYPE_PRECISION (TREE_TYPE (value)))
	{
	  /* NCACHE holds two cache pairs: slots 0-1 for operand 0,
	     slots 2-3 for operand 1.  */
	  tree ncache[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
	  tree valid0
	    = initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
					      endtype, cache: ncache);
	  tree valid1
	    = initializer_constant_valid_p_1 (TREE_OPERAND (value, 1),
					      endtype, cache: ncache + 2);
	  /* If either term is absolute, use the other term's relocation.  */
	  if (valid0 == null_pointer_node)
	    ret = valid1;
	  else if (valid1 == null_pointer_node)
	    ret = valid0;
	  /* Support narrowing pointer differences.  */
	  else
	    ret = narrowing_initializer_constant_valid_p (value, endtype,
							  cache: ncache);
	}
      else
	/* Support narrowing pointer differences.  */
	ret = narrowing_initializer_constant_valid_p (value, endtype, NULL);
      if (cache)
	{
	  cache[0] = value;
	  cache[1] = ret;
	}
      return ret;

    case POINTER_DIFF_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (endtype) == REAL_TYPE)
	return NULL_TREE;
      if (cache && cache[0] == value)
	return cache[1];
      if (! INTEGRAL_TYPE_P (endtype)
	  || ! INTEGRAL_TYPE_P (TREE_TYPE (value))
	  || TYPE_PRECISION (endtype) >= TYPE_PRECISION (TREE_TYPE (value)))
	{
	  /* Same two-pair cache layout as the PLUS_EXPR case above.  */
	  tree ncache[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
	  tree valid0
	    = initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
					      endtype, cache: ncache);
	  tree valid1
	    = initializer_constant_valid_p_1 (TREE_OPERAND (value, 1),
					      endtype, cache: ncache + 2);
	  /* Win if second argument is absolute.  */
	  if (valid1 == null_pointer_node)
	    ret = valid0;
	  /* Win if both arguments have the same relocation.
	     Then the value is absolute.  */
	  else if (valid0 == valid1 && valid0 != 0)
	    ret = null_pointer_node;
	  /* Since GCC guarantees that string constants are unique in the
	     generated code, a subtraction between two copies of the same
	     constant string is absolute.  */
	  else if (valid0 && TREE_CODE (valid0) == STRING_CST
		   && valid1 && TREE_CODE (valid1) == STRING_CST
		   && operand_equal_p (valid0, valid1, flags: 1))
	    ret = null_pointer_node;
	  /* Support narrowing differences.  */
	  else
	    ret = narrowing_initializer_constant_valid_p (value, endtype,
							  cache: ncache);
	}
      else
	/* Support narrowing differences.  */
	ret = narrowing_initializer_constant_valid_p (value, endtype, NULL);
      if (cache)
	{
	  cache[0] = value;
	  cache[1] = ret;
	}
      return ret;

    default:
      break;
    }

  return NULL_TREE;
}
5091 | |
5092 | /* Return nonzero if VALUE is a valid constant-valued expression |
5093 | for use in initializing a static variable; one that can be an |
5094 | element of a "constant" initializer. |
5095 | |
5096 | Return null_pointer_node if the value is absolute; |
5097 | if it is relocatable, return the variable that determines the relocation. |
5098 | We assume that VALUE has been folded as much as possible; |
5099 | therefore, we do not need to check for such things as |
5100 | arithmetic-combinations of integers. */ |
5101 | tree |
5102 | initializer_constant_valid_p (tree value, tree endtype, bool reverse) |
5103 | { |
5104 | tree reloc = initializer_constant_valid_p_1 (value, endtype, NULL); |
5105 | |
5106 | /* An absolute value is required with reverse storage order. */ |
5107 | if (reloc |
5108 | && reloc != null_pointer_node |
5109 | && reverse |
5110 | && !AGGREGATE_TYPE_P (endtype) |
5111 | && !VECTOR_TYPE_P (endtype)) |
5112 | reloc = NULL_TREE; |
5113 | |
5114 | return reloc; |
5115 | } |
5116 | |
5117 | /* Return true if VALUE is a valid constant-valued expression |
5118 | for use in initializing a static bit-field; one that can be |
5119 | an element of a "constant" initializer. */ |
5120 | |
5121 | bool |
5122 | initializer_constant_valid_for_bitfield_p (const_tree value) |
5123 | { |
5124 | /* For bitfields we support integer constants or possibly nested aggregates |
5125 | of such. */ |
5126 | switch (TREE_CODE (value)) |
5127 | { |
5128 | case CONSTRUCTOR: |
5129 | { |
5130 | unsigned HOST_WIDE_INT idx; |
5131 | const_tree elt; |
5132 | |
5133 | FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (value), idx, elt) |
5134 | if (!initializer_constant_valid_for_bitfield_p (value: elt)) |
5135 | return false; |
5136 | return true; |
5137 | } |
5138 | |
5139 | case INTEGER_CST: |
5140 | case REAL_CST: |
5141 | return true; |
5142 | |
5143 | case VIEW_CONVERT_EXPR: |
5144 | case NON_LVALUE_EXPR: |
5145 | return |
5146 | initializer_constant_valid_for_bitfield_p (TREE_OPERAND (value, 0)); |
5147 | |
5148 | default: |
5149 | break; |
5150 | } |
5151 | |
5152 | return false; |
5153 | } |
5154 | |
5155 | /* Check if a STRING_CST fits into the field. |
5156 | Tolerate only the case when the NUL termination |
5157 | does not fit into the field. */ |
5158 | |
5159 | static bool |
5160 | check_string_literal (tree string, unsigned HOST_WIDE_INT size) |
5161 | { |
5162 | tree type = TREE_TYPE (string); |
5163 | tree eltype = TREE_TYPE (type); |
5164 | unsigned HOST_WIDE_INT elts = tree_to_uhwi (TYPE_SIZE_UNIT (eltype)); |
5165 | unsigned HOST_WIDE_INT mem_size = tree_to_uhwi (TYPE_SIZE_UNIT (type)); |
5166 | int len = TREE_STRING_LENGTH (string); |
5167 | |
5168 | if (elts != 1 && elts != 2 && elts != 4) |
5169 | return false; |
5170 | if (len < 0 || len % elts != 0) |
5171 | return false; |
5172 | if (size < (unsigned)len) |
5173 | return false; |
5174 | if (mem_size != size) |
5175 | return false; |
5176 | return true; |
5177 | } |
5178 | |
/* output_constructor outer state of relevance in recursive calls, typically
   for nested aggregate bitfields: lets an inner invocation accumulate bits
   into the byte its caller is assembling.  */

struct oc_outer_state {
  unsigned int bit_offset; /* current position in ... */
  int byte; /* ... the outer byte buffer. */
};
5186 | |
5187 | static unsigned HOST_WIDE_INT |
5188 | output_constructor (tree, unsigned HOST_WIDE_INT, unsigned int, bool, |
5189 | oc_outer_state *); |
5190 | |
5191 | /* Output assembler code for constant EXP, with no label. |
5192 | This includes the pseudo-op such as ".int" or ".byte", and a newline. |
5193 | Assumes output_addressed_constants has been done on EXP already. |
5194 | |
5195 | Generate at least SIZE bytes of assembler data, padding at the end |
5196 | with zeros if necessary. SIZE must always be specified. The returned |
5197 | value is the actual number of bytes of assembler data generated, which |
5198 | may be bigger than SIZE if the object contains a variable length field. |
5199 | |
5200 | SIZE is important for structure constructors, |
5201 | since trailing members may have been omitted from the constructor. |
5202 | It is also important for initialization of arrays from string constants |
5203 | since the full length of the string constant might not be wanted. |
5204 | It is also needed for initialization of unions, where the initializer's |
5205 | type is just one member, and that may not be as long as the union. |
5206 | |
5207 | There a case in which we would fail to output exactly SIZE bytes: |
5208 | for a structure constructor that wants to produce more than SIZE bytes. |
5209 | But such constructors will never be generated for any possible input. |
5210 | |
5211 | ALIGN is the alignment of the data in bits. |
5212 | |
5213 | If REVERSE is true, EXP is output in reverse storage order. */ |
5214 | |
5215 | static unsigned HOST_WIDE_INT |
5216 | output_constant (tree exp, unsigned HOST_WIDE_INT size, unsigned int align, |
5217 | bool reverse, bool merge_strings) |
5218 | { |
5219 | enum tree_code code; |
5220 | unsigned HOST_WIDE_INT thissize; |
5221 | rtx cst; |
5222 | |
5223 | if (size == 0 || flag_syntax_only) |
5224 | return size; |
5225 | |
5226 | /* See if we're trying to initialize a pointer in a non-default mode |
5227 | to the address of some declaration somewhere. If the target says |
5228 | the mode is valid for pointers, assume the target has a way of |
5229 | resolving it. */ |
5230 | if (TREE_CODE (exp) == NOP_EXPR |
5231 | && POINTER_TYPE_P (TREE_TYPE (exp)) |
5232 | && targetm.addr_space.valid_pointer_mode |
5233 | (SCALAR_INT_TYPE_MODE (TREE_TYPE (exp)), |
5234 | TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp))))) |
5235 | { |
5236 | tree saved_type = TREE_TYPE (exp); |
5237 | |
5238 | /* Peel off any intermediate conversions-to-pointer for valid |
5239 | pointer modes. */ |
5240 | while (TREE_CODE (exp) == NOP_EXPR |
5241 | && POINTER_TYPE_P (TREE_TYPE (exp)) |
5242 | && targetm.addr_space.valid_pointer_mode |
5243 | (SCALAR_INT_TYPE_MODE (TREE_TYPE (exp)), |
5244 | TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp))))) |
5245 | exp = TREE_OPERAND (exp, 0); |
5246 | |
5247 | /* If what we're left with is the address of something, we can |
5248 | convert the address to the final type and output it that |
5249 | way. */ |
5250 | if (TREE_CODE (exp) == ADDR_EXPR) |
5251 | exp = build1 (ADDR_EXPR, saved_type, TREE_OPERAND (exp, 0)); |
5252 | /* Likewise for constant ints. */ |
5253 | else if (TREE_CODE (exp) == INTEGER_CST) |
5254 | exp = fold_convert (saved_type, exp); |
5255 | |
5256 | } |
5257 | |
5258 | /* Eliminate any conversions since we'll be outputting the underlying |
5259 | constant. */ |
5260 | while (CONVERT_EXPR_P (exp) |
5261 | || TREE_CODE (exp) == NON_LVALUE_EXPR |
5262 | || TREE_CODE (exp) == VIEW_CONVERT_EXPR) |
5263 | { |
5264 | HOST_WIDE_INT type_size = int_size_in_bytes (TREE_TYPE (exp)); |
5265 | HOST_WIDE_INT op_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))); |
5266 | |
5267 | /* Make sure eliminating the conversion is really a no-op, except with |
5268 | VIEW_CONVERT_EXPRs to allow for wild Ada unchecked conversions and |
5269 | union types to allow for Ada unchecked unions. */ |
5270 | if (type_size > op_size |
5271 | && TREE_CODE (exp) != VIEW_CONVERT_EXPR |
5272 | && TREE_CODE (TREE_TYPE (exp)) != UNION_TYPE) |
5273 | /* Keep the conversion. */ |
5274 | break; |
5275 | else |
5276 | exp = TREE_OPERAND (exp, 0); |
5277 | } |
5278 | |
5279 | code = TREE_CODE (TREE_TYPE (exp)); |
5280 | thissize = int_size_in_bytes (TREE_TYPE (exp)); |
5281 | |
5282 | /* Allow a constructor with no elements for any data type. |
5283 | This means to fill the space with zeros. */ |
5284 | if (TREE_CODE (exp) == CONSTRUCTOR |
5285 | && vec_safe_is_empty (CONSTRUCTOR_ELTS (exp))) |
5286 | { |
5287 | assemble_zeros (size); |
5288 | return size; |
5289 | } |
5290 | |
5291 | if (TREE_CODE (exp) == FDESC_EXPR) |
5292 | { |
5293 | #ifdef ASM_OUTPUT_FDESC |
5294 | HOST_WIDE_INT part = tree_to_shwi (TREE_OPERAND (exp, 1)); |
5295 | tree decl = TREE_OPERAND (exp, 0); |
5296 | ASM_OUTPUT_FDESC (asm_out_file, decl, part); |
5297 | #else |
5298 | gcc_unreachable (); |
5299 | #endif |
5300 | return size; |
5301 | } |
5302 | |
5303 | /* Now output the underlying data. If we've handling the padding, return. |
5304 | Otherwise, break and ensure SIZE is the size written. */ |
5305 | switch (code) |
5306 | { |
5307 | case BOOLEAN_TYPE: |
5308 | case INTEGER_TYPE: |
5309 | case ENUMERAL_TYPE: |
5310 | case POINTER_TYPE: |
5311 | case REFERENCE_TYPE: |
5312 | case OFFSET_TYPE: |
5313 | case FIXED_POINT_TYPE: |
5314 | case NULLPTR_TYPE: |
5315 | cst = expand_expr (exp, NULL_RTX, VOIDmode, modifier: EXPAND_INITIALIZER); |
5316 | if (reverse) |
5317 | cst = flip_storage_order (TYPE_MODE (TREE_TYPE (exp)), cst); |
5318 | if (!assemble_integer (x: cst, MIN (size, thissize), align, force: 0)) |
5319 | error ("initializer for integer/fixed-point value is too complicated" ); |
5320 | break; |
5321 | |
5322 | case REAL_TYPE: |
5323 | gcc_assert (size == thissize); |
5324 | if (TREE_CODE (exp) != REAL_CST) |
5325 | error ("initializer for floating value is not a floating constant" ); |
5326 | else |
5327 | assemble_real (TREE_REAL_CST (exp), |
5328 | SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (exp)), |
5329 | align, reverse); |
5330 | break; |
5331 | |
5332 | case COMPLEX_TYPE: |
5333 | output_constant (TREE_REALPART (exp), size: thissize / 2, align, |
5334 | reverse, merge_strings: false); |
5335 | output_constant (TREE_IMAGPART (exp), size: thissize / 2, |
5336 | align: min_align (a: align, BITS_PER_UNIT * (thissize / 2)), |
5337 | reverse, merge_strings: false); |
5338 | break; |
5339 | |
5340 | case BITINT_TYPE: |
5341 | if (TREE_CODE (exp) != INTEGER_CST) |
5342 | error ("initializer for %<_BitInt(%d)%> value is not an integer " |
5343 | "constant" , TYPE_PRECISION (TREE_TYPE (exp))); |
5344 | else |
5345 | { |
5346 | struct bitint_info info; |
5347 | tree type = TREE_TYPE (exp); |
5348 | bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info); |
5349 | gcc_assert (ok); |
5350 | scalar_int_mode limb_mode |
5351 | = as_a <scalar_int_mode> (m: info.abi_limb_mode); |
5352 | if (TYPE_PRECISION (type) <= GET_MODE_PRECISION (mode: limb_mode)) |
5353 | { |
5354 | cst = expand_expr (exp, NULL_RTX, VOIDmode, modifier: EXPAND_INITIALIZER); |
5355 | if (reverse) |
5356 | cst = flip_storage_order (TYPE_MODE (TREE_TYPE (exp)), cst); |
5357 | if (!assemble_integer (x: cst, MIN (size, thissize), align, force: 0)) |
5358 | error ("initializer for integer/fixed-point value is too " |
5359 | "complicated" ); |
5360 | break; |
5361 | } |
5362 | int prec = GET_MODE_PRECISION (mode: limb_mode); |
5363 | int cnt = CEIL (TYPE_PRECISION (type), prec); |
5364 | tree limb_type = build_nonstandard_integer_type (prec, 1); |
5365 | int elt_size = GET_MODE_SIZE (mode: limb_mode); |
5366 | unsigned int nalign = MIN (align, GET_MODE_ALIGNMENT (limb_mode)); |
5367 | thissize = 0; |
5368 | if (prec == HOST_BITS_PER_WIDE_INT) |
5369 | for (int i = 0; i < cnt; i++) |
5370 | { |
5371 | int idx = (info.big_endian ^ reverse) ? cnt - 1 - i : i; |
5372 | tree c; |
5373 | if (idx >= TREE_INT_CST_EXT_NUNITS (exp)) |
5374 | c = build_int_cst (limb_type, |
5375 | tree_int_cst_sgn (exp) < 0 ? -1 : 0); |
5376 | else |
5377 | c = build_int_cst (limb_type, |
5378 | TREE_INT_CST_ELT (exp, idx)); |
5379 | output_constant (exp: c, size: elt_size, align: nalign, reverse, merge_strings: false); |
5380 | thissize += elt_size; |
5381 | } |
5382 | else |
5383 | for (int i = 0; i < cnt; i++) |
5384 | { |
5385 | int idx = (info.big_endian ^ reverse) ? cnt - 1 - i : i; |
5386 | wide_int w = wi::rshift (x: wi::to_wide (t: exp), y: idx * prec, |
5387 | TYPE_SIGN (TREE_TYPE (exp))); |
5388 | tree c = wide_int_to_tree (type: limb_type, |
5389 | cst: wide_int::from (x: w, precision: prec, sgn: UNSIGNED)); |
5390 | output_constant (exp: c, size: elt_size, align: nalign, reverse, merge_strings: false); |
5391 | thissize += elt_size; |
5392 | } |
5393 | } |
5394 | break; |
5395 | |
5396 | case ARRAY_TYPE: |
5397 | case VECTOR_TYPE: |
5398 | switch (TREE_CODE (exp)) |
5399 | { |
5400 | case CONSTRUCTOR: |
5401 | return output_constructor (exp, size, align, reverse, NULL); |
5402 | case STRING_CST: |
5403 | thissize = (unsigned HOST_WIDE_INT)TREE_STRING_LENGTH (exp); |
5404 | if (merge_strings |
5405 | && (thissize == 0 |
5406 | || TREE_STRING_POINTER (exp) [thissize - 1] != '\0')) |
5407 | thissize++; |
5408 | gcc_checking_assert (check_string_literal (exp, size)); |
5409 | assemble_string (TREE_STRING_POINTER (exp), size: thissize); |
5410 | break; |
5411 | case VECTOR_CST: |
5412 | { |
5413 | scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (TREE_TYPE (exp))); |
5414 | unsigned int nalign = MIN (align, GET_MODE_ALIGNMENT (inner)); |
5415 | int elt_size = GET_MODE_SIZE (mode: inner); |
5416 | output_constant (VECTOR_CST_ELT (exp, 0), size: elt_size, align, |
5417 | reverse, merge_strings: false); |
5418 | thissize = elt_size; |
5419 | /* Static constants must have a fixed size. */ |
5420 | unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant (); |
5421 | for (unsigned int i = 1; i < nunits; i++) |
5422 | { |
5423 | output_constant (VECTOR_CST_ELT (exp, i), size: elt_size, align: nalign, |
5424 | reverse, merge_strings: false); |
5425 | thissize += elt_size; |
5426 | } |
5427 | break; |
5428 | } |
5429 | default: |
5430 | gcc_unreachable (); |
5431 | } |
5432 | break; |
5433 | |
5434 | case RECORD_TYPE: |
5435 | case UNION_TYPE: |
5436 | gcc_assert (TREE_CODE (exp) == CONSTRUCTOR); |
5437 | return output_constructor (exp, size, align, reverse, NULL); |
5438 | |
5439 | case ERROR_MARK: |
5440 | return 0; |
5441 | |
5442 | default: |
5443 | gcc_unreachable (); |
5444 | } |
5445 | |
5446 | if (size > thissize) |
5447 | assemble_zeros (size: size - thissize); |
5448 | |
5449 | return size; |
5450 | } |
5451 | |
5452 | /* Subroutine of output_constructor, used for computing the size of |
5453 | arrays of unspecified length. VAL must be a CONSTRUCTOR of an array |
5454 | type with an unspecified upper bound. */ |
5455 | |
5456 | static unsigned HOST_WIDE_INT |
5457 | array_size_for_constructor (tree val) |
5458 | { |
5459 | tree max_index; |
5460 | unsigned HOST_WIDE_INT cnt; |
5461 | tree index, value, tmp; |
5462 | offset_int i; |
5463 | |
5464 | /* This code used to attempt to handle string constants that are not |
5465 | arrays of single-bytes, but nothing else does, so there's no point in |
5466 | doing it here. */ |
5467 | if (TREE_CODE (val) == STRING_CST) |
5468 | return TREE_STRING_LENGTH (val); |
5469 | |
5470 | max_index = NULL_TREE; |
5471 | FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (val), cnt, index, value) |
5472 | { |
5473 | if (TREE_CODE (index) == RANGE_EXPR) |
5474 | index = TREE_OPERAND (index, 1); |
5475 | if (max_index == NULL_TREE || tree_int_cst_lt (t1: max_index, t2: index)) |
5476 | max_index = index; |
5477 | } |
5478 | |
5479 | if (max_index == NULL_TREE) |
5480 | return 0; |
5481 | |
5482 | /* Compute the total number of array elements. */ |
5483 | tmp = TYPE_MIN_VALUE (TYPE_DOMAIN (TREE_TYPE (val))); |
5484 | i = wi::to_offset (t: max_index) - wi::to_offset (t: tmp) + 1; |
5485 | |
5486 | /* Multiply by the array element unit size to find number of bytes. */ |
5487 | i *= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (val)))); |
5488 | |
5489 | gcc_assert (wi::fits_uhwi_p (i)); |
5490 | return i.to_uhwi (); |
5491 | } |
5492 | |
5493 | /* Other datastructures + helpers for output_constructor. */ |
5494 | |
5495 | /* output_constructor local state to support interaction with helpers. */ |
5496 | |
struct oc_local_state {

  /* Received arguments.  */
  tree exp; /* Constructor expression. */
  tree type; /* Type of constructor expression. */
  unsigned HOST_WIDE_INT size; /* # bytes to output - pad if necessary. */
  unsigned int align; /* Known initial alignment. */
  tree min_index; /* Lower bound if specified for an array. */

  /* Output processing state, mutated by the output_constructor_* helpers
     as bytes are emitted to the assembly file.  */
  HOST_WIDE_INT total_bytes; /* # bytes output so far / current position. */
  int byte; /* Part of a bitfield byte yet to be output. */
  int last_relative_index; /* Implicit or explicit index of the last
			      array element output within a bitfield. */
  bool byte_buffer_in_use; /* Whether BYTE is in use. */
  bool reverse; /* Whether reverse storage order is in use. */

  /* Current element being output, refreshed on each constructor element.  */
  tree field; /* Current field decl in a record. */
  tree val; /* Current element value. */
  tree index; /* Current element index. */

};
5520 | |
5521 | /* Helper for output_constructor. From the current LOCAL state, output a |
5522 | RANGE_EXPR element. */ |
5523 | |
static void
output_constructor_array_range (oc_local_state *local)
{
  /* LOCAL->index is a RANGE_EXPR: operand 0 is the low index, operand 1
     the high index, and LOCAL->val is the single value replicated for
     every element in that range.  */

  /* Perform the index calculation in modulo arithmetic but
     sign-extend the result because Ada has negative DECL_FIELD_OFFSETs
     but we are using an unsigned sizetype.  */
  unsigned prec = TYPE_PRECISION (sizetype);
  offset_int idx = wi::sext (x: wi::to_offset (TREE_OPERAND (local->index, 0))
			     - wi::to_offset (t: local->min_index), offset: prec);
  /* NOTE(review): local->val is dereferenced for its type here, before the
     NULL_TREE check in the loop below — presumably a RANGE_EXPR element
     always carries a value; confirm against callers.  */
  tree valtype = TREE_TYPE (local->val);
  HOST_WIDE_INT fieldpos
    = (idx * wi::to_offset (TYPE_SIZE_UNIT (valtype))).to_short_addr ();

  /* Advance to offset of this element by padding with zeros if needed.  */
  if (fieldpos > local->total_bytes)
    {
      assemble_zeros (size: fieldpos - local->total_bytes);
      local->total_bytes = fieldpos;
    }
  else
    /* Must not go backwards.  */
    gcc_assert (fieldpos == local->total_bytes);

  /* Size of one element, taken from the array's element type.  */
  unsigned HOST_WIDE_INT fieldsize
    = int_size_in_bytes (TREE_TYPE (local->type));

  HOST_WIDE_INT lo_index
    = tree_to_shwi (TREE_OPERAND (local->index, 0));
  HOST_WIDE_INT hi_index
    = tree_to_shwi (TREE_OPERAND (local->index, 1));
  HOST_WIDE_INT index;

  /* Alignment known to hold at each element boundary.  */
  unsigned int align2
    = min_align (a: local->align, b: fieldsize * BITS_PER_UNIT);

  /* Emit one copy of the value per index in [lo_index, hi_index].  */
  for (index = lo_index; index <= hi_index; index++)
    {
      /* Output the element's initial value.  */
      if (local->val == NULL_TREE)
	assemble_zeros (size: fieldsize);
      else
	fieldsize = output_constant (exp: local->val, size: fieldsize, align: align2,
				     reverse: local->reverse, merge_strings: false);

      /* Count its size.  */
      local->total_bytes += fieldsize;
    }
}
5572 | |
5573 | /* Helper for output_constructor. From the current LOCAL state, output a |
5574 | field element that is not true bitfield or part of an outer one. */ |
5575 | |
static void
output_constructor_regular_field (oc_local_state *local)
{
  /* Field size and position.  Since this structure is static, we know the
     positions are constant.  */
  unsigned HOST_WIDE_INT fieldsize;
  HOST_WIDE_INT fieldpos;

  unsigned int align2;

  /* Output any buffered-up bit-fields preceding this element.  */
  if (local->byte_buffer_in_use)
    {
      assemble_integer (GEN_INT (local->byte), size: 1, BITS_PER_UNIT, force: 1);
      local->total_bytes++;
      local->byte_buffer_in_use = false;
    }

  /* Compute the byte position of this element: from the explicit array
     index, from the record field's layout, or zero if neither applies.  */
  if (local->index != NULL_TREE)
    {
      /* Perform the index calculation in modulo arithmetic but
	 sign-extend the result because Ada has negative DECL_FIELD_OFFSETs
	 but we are using an unsigned sizetype.  */
      unsigned prec = TYPE_PRECISION (sizetype);
      offset_int idx = wi::sext (x: wi::to_offset (t: local->index)
				 - wi::to_offset (t: local->min_index), offset: prec);
      fieldpos = (idx * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (local->val))))
	.to_short_addr ();
    }
  else if (local->field != NULL_TREE)
    fieldpos = int_byte_position (local->field);
  else
    fieldpos = 0;

  /* Advance to offset of this element.
     Note no alignment needed in an array, since that is guaranteed
     if each element has the proper size.  */
  if (local->field != NULL_TREE || local->index != NULL_TREE)
    {
      if (fieldpos > local->total_bytes)
	{
	  assemble_zeros (size: fieldpos - local->total_bytes);
	  local->total_bytes = fieldpos;
	}
      else
	/* Must not go backwards.  */
	gcc_assert (fieldpos == local->total_bytes);
    }

  /* Find the alignment of this element.  */
  align2 = min_align (a: local->align, BITS_PER_UNIT * fieldpos);

  /* Determine size this element should occupy.  */
  if (local->field)
    {
      fieldsize = 0;

      /* If this is an array with an unspecified upper bound,
	 the initializer determines the size.  */
      /* ??? This ought to only checked if DECL_SIZE_UNIT is NULL,
	 but we cannot do this until the deprecated support for
	 initializing zero-length array members is removed.  */
      if (TREE_CODE (TREE_TYPE (local->field)) == ARRAY_TYPE
	  && (!TYPE_DOMAIN (TREE_TYPE (local->field))
	      || !TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (local->field)))))
	{
	  unsigned HOST_WIDE_INT fldsize
	    = array_size_for_constructor (val: local->val);
	  fieldsize = int_size_in_bytes (TREE_TYPE (local->val));
	  /* In most cases fieldsize == fldsize as the size of the initializer
	     determines how many elements the flexible array member has.  For
	     C++ fldsize can be smaller though, if the last or several last or
	     all initializers of the flexible array member have side-effects
	     and the FE splits them into dynamic initialization.  */
	  gcc_checking_assert (fieldsize >= fldsize);
	  /* Given a non-empty initialization, this field had better
	     be last.  Given a flexible array member, the next field
	     on the chain is a TYPE_DECL of the enclosing struct.  */
	  const_tree next = DECL_CHAIN (local->field);
	  gcc_assert (!fieldsize || !next || TREE_CODE (next) != FIELD_DECL);
	}
      else
	fieldsize = tree_to_uhwi (DECL_SIZE_UNIT (local->field));
    }
  else
    /* Not a record field: use the size of the array's element type.  */
    fieldsize = int_size_in_bytes (TREE_TYPE (local->type));

  /* Output the element's initial value.  */
  if (local->val == NULL_TREE)
    assemble_zeros (size: fieldsize);
  else
    fieldsize = output_constant (exp: local->val, size: fieldsize, align: align2,
				 reverse: local->reverse, merge_strings: false);

  /* Count its size.  */
  local->total_bytes += fieldsize;
}
5673 | |
5674 | /* Helper for output_constructor. From the LOCAL state, output an element |
5675 | that is a true bitfield or part of an outer one. BIT_OFFSET is the offset |
5676 | from the start of a possibly ongoing outer byte buffer. */ |
5677 | |
static void
output_constructor_bitfield (oc_local_state *local, unsigned int bit_offset)
{
  /* Bit size of this element.  */
  HOST_WIDE_INT ebitsize
    = (local->field
       ? tree_to_uhwi (DECL_SIZE (local->field))
       : tree_to_uhwi (TYPE_SIZE (TREE_TYPE (local->type))));

  /* Relative index of this element if this is an array component.  */
  HOST_WIDE_INT relative_index
    = (local->field
       ? 0
       : (local->index
	  ? tree_to_uhwi (local->index) - tree_to_uhwi (local->min_index)
	  : local->last_relative_index + 1));

  /* Bit position of this element from the start of the containing
     constructor.  */
  HOST_WIDE_INT constructor_relative_ebitpos
    = (local->field
       ? int_bit_position (field: local->field)
       : ebitsize * relative_index);

  /* Bit position of this element from the start of a possibly ongoing
     outer byte buffer.  */
  HOST_WIDE_INT byte_relative_ebitpos
    = bit_offset + constructor_relative_ebitpos;

  /* From the start of a possibly ongoing outer byte buffer, offsets to
     the first bit of this element and to the first bit past the end of
     this element.  */
  HOST_WIDE_INT next_offset = byte_relative_ebitpos;
  HOST_WIDE_INT end_offset = byte_relative_ebitpos + ebitsize;

  local->last_relative_index = relative_index;

  /* A missing initializer is treated as zero.  */
  if (local->val == NULL_TREE)
    local->val = integer_zero_node;

  /* Strip no-op wrappers to get at the underlying constant.  */
  while (TREE_CODE (local->val) == VIEW_CONVERT_EXPR
	 || TREE_CODE (local->val) == NON_LVALUE_EXPR)
    local->val = TREE_OPERAND (local->val, 0);

  /* Only integer constants and nested constructors are supported here;
     REAL_CSTs are converted by the caller before reaching this point.  */
  if (TREE_CODE (local->val) != INTEGER_CST
      && TREE_CODE (local->val) != CONSTRUCTOR)
    {
      error ("invalid initial value for member %qE" , DECL_NAME (local->field));
      return;
    }

  /* If this field does not start in this (or next) byte, skip some bytes.  */
  if (next_offset / BITS_PER_UNIT != local->total_bytes)
    {
      /* Output remnant of any bit field in previous bytes.  */
      if (local->byte_buffer_in_use)
	{
	  assemble_integer (GEN_INT (local->byte), size: 1, BITS_PER_UNIT, force: 1);
	  local->total_bytes++;
	  local->byte_buffer_in_use = false;
	}

      /* If still not at proper byte, advance to there.  */
      if (next_offset / BITS_PER_UNIT != local->total_bytes)
	{
	  gcc_assert (next_offset / BITS_PER_UNIT >= local->total_bytes);
	  assemble_zeros (size: next_offset / BITS_PER_UNIT - local->total_bytes);
	  local->total_bytes = next_offset / BITS_PER_UNIT;
	}
    }

  /* Set up the buffer if necessary.  */
  if (!local->byte_buffer_in_use)
    {
      local->byte = 0;
      if (ebitsize > 0)
	local->byte_buffer_in_use = true;
    }

  /* If this is nested constructor, recurse passing the bit offset and the
     pending data, then retrieve the new pending data afterwards.  */
  if (TREE_CODE (local->val) == CONSTRUCTOR)
    {
      oc_outer_state temp_state;
      temp_state.bit_offset = next_offset % BITS_PER_UNIT;
      temp_state.byte = local->byte;
      local->total_bytes
	+= output_constructor (local->val, 0, 0, local->reverse, &temp_state);
      local->byte = temp_state.byte;
      return;
    }

  /* Otherwise, we must split the element into pieces that fall within
     separate bytes, and combine each byte with previous or following
     bit-fields.  */
  while (next_offset < end_offset)
    {
      int this_time;
      int shift;
      unsigned HOST_WIDE_INT value;
      HOST_WIDE_INT next_byte = next_offset / BITS_PER_UNIT;
      HOST_WIDE_INT next_bit = next_offset % BITS_PER_UNIT;

      /* Advance from byte to byte within this element when necessary.  */
      while (next_byte != local->total_bytes)
	{
	  assemble_integer (GEN_INT (local->byte), size: 1, BITS_PER_UNIT, force: 1);
	  local->total_bytes++;
	  local->byte = 0;
	}

      /* Number of bits we can process at once (all part of the same byte).  */
      this_time = MIN (end_offset - next_offset, BITS_PER_UNIT - next_bit);
      if (local->reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
	{
	  /* For big-endian data, take the most significant bits (of the
	     bits that are significant) first and put them into bytes from
	     the most significant end.  */
	  shift = end_offset - next_offset - this_time;

	  /* Don't try to take a bunch of bits that cross
	     the word boundary in the INTEGER_CST.  We can
	     only select bits from one element.  */
	  if ((shift / HOST_BITS_PER_WIDE_INT)
	      != ((shift + this_time - 1) / HOST_BITS_PER_WIDE_INT))
	    {
	      /* Clamp to the bits above the word boundary.  */
	      const int end = shift + this_time - 1;
	      shift = end & -HOST_BITS_PER_WIDE_INT;
	      this_time = end - shift + 1;
	    }

	  /* Now get the bits we want to insert.  */
	  value = wi::extract_uhwi (x: wi::to_widest (t: local->val),
				    bitpos: shift, width: this_time);

	  /* Get the result.  This works only when:
	     1 <= this_time <= HOST_BITS_PER_WIDE_INT.  */
	  local->byte |= value << (BITS_PER_UNIT - this_time - next_bit);
	}
      else
	{
	  /* On little-endian machines, take the least significant bits of
	     the value first and pack them starting at the least significant
	     bits of the bytes.  */
	  shift = next_offset - byte_relative_ebitpos;

	  /* Don't try to take a bunch of bits that cross
	     the word boundary in the INTEGER_CST.  We can
	     only select bits from one element.  */
	  if ((shift / HOST_BITS_PER_WIDE_INT)
	      != ((shift + this_time - 1) / HOST_BITS_PER_WIDE_INT))
	    this_time
	      = HOST_BITS_PER_WIDE_INT - (shift & (HOST_BITS_PER_WIDE_INT - 1));

	  /* Now get the bits we want to insert.  */
	  value = wi::extract_uhwi (x: wi::to_widest (t: local->val),
				    bitpos: shift, width: this_time);

	  /* Get the result.  This works only when:
	     1 <= this_time <= HOST_BITS_PER_WIDE_INT.  */
	  local->byte |= value << next_bit;
	}

      next_offset += this_time;
      local->byte_buffer_in_use = true;
    }
}
5845 | |
5846 | /* Subroutine of output_constant, used for CONSTRUCTORs (aggregate constants). |
5847 | Generate at least SIZE bytes, padding if necessary. OUTER designates the |
5848 | caller output state of relevance in recursive invocations. */ |
5849 | |
static unsigned HOST_WIDE_INT
output_constructor (tree exp, unsigned HOST_WIDE_INT size, unsigned int align,
		    bool reverse, oc_outer_state *outer)
{
  unsigned HOST_WIDE_INT cnt;
  constructor_elt *ce;
  oc_local_state local;

  /* Setup our local state to communicate with helpers.  */
  local.exp = exp;
  local.type = TREE_TYPE (exp);
  local.size = size;
  local.align = align;
  if (TREE_CODE (local.type) == ARRAY_TYPE && TYPE_DOMAIN (local.type))
    local.min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (local.type));
  else
    local.min_index = integer_zero_node;

  local.total_bytes = 0;
  /* When called recursively for a nested bitfield constructor, OUTER
     carries a partially filled byte to continue from.  */
  local.byte_buffer_in_use = outer != NULL;
  local.byte = outer ? outer->byte : 0;
  local.last_relative_index = -1;
  /* The storage order is specified for every aggregate type.  */
  if (AGGREGATE_TYPE_P (local.type))
    local.reverse = TYPE_REVERSE_STORAGE_ORDER (local.type);
  else
    local.reverse = reverse;

  gcc_assert (HOST_BITS_PER_WIDE_INT >= BITS_PER_UNIT);

  /* As CE goes through the elements of the constant, FIELD goes through the
     structure fields if the constant is a structure.  If the constant is a
     union, we override this by getting the field from the TREE_LIST element.
     But the constant could also be an array.  Then FIELD is zero.

     There is always a maximum of one element in the chain LINK for unions
     (even if the initializer in a source program incorrectly contains
     more than one).  */

  if (TREE_CODE (local.type) == RECORD_TYPE)
    local.field = TYPE_FIELDS (local.type);
  else
    local.field = NULL_TREE;

  for (cnt = 0;
       vec_safe_iterate (CONSTRUCTOR_ELTS (exp), ix: cnt, ptr: &ce);
       cnt++, local.field = local.field ? DECL_CHAIN (local.field) : 0)
    {
      local.val = ce->value;
      local.index = NULL_TREE;

      /* The element in a union constructor specifies the proper field
	 or index.  */
      if (RECORD_OR_UNION_TYPE_P (local.type) && ce->index != NULL_TREE)
	local.field = ce->index;

      else if (TREE_CODE (local.type) == ARRAY_TYPE)
	local.index = ce->index;

      /* Emit an assembly comment naming the field with -fverbose-asm.  */
      if (local.field && flag_verbose_asm)
	fprintf (stream: asm_out_file, format: "%s %s:\n" ,
		 ASM_COMMENT_START,
		 DECL_NAME (local.field)
		 ? IDENTIFIER_POINTER (DECL_NAME (local.field))
		 : "<anonymous>" );

      /* Eliminate the marker that makes a cast not be an lvalue.  */
      if (local.val != NULL_TREE)
	STRIP_NOPS (local.val);

      /* Output the current element, using the appropriate helper ...  */

      /* For an array slice not part of an outer bitfield.  */
      if (!outer
	  && local.index != NULL_TREE
	  && TREE_CODE (local.index) == RANGE_EXPR)
	output_constructor_array_range (local: &local);

      /* For a field that is neither a true bitfield nor part of an outer one,
	 known to be at least byte aligned and multiple-of-bytes long.  */
      else if (!outer
	       && (local.field == NULL_TREE
		   || !CONSTRUCTOR_BITFIELD_P (local.field)))
	output_constructor_regular_field (local: &local);

      /* For a true bitfield or part of an outer one.  Only INTEGER_CSTs are
	 supported for scalar fields, so we may need to convert first.  */
      else
	{
	  if (TREE_CODE (local.val) == REAL_CST)
	    local.val
	      = fold_unary (VIEW_CONVERT_EXPR,
			    build_nonstandard_integer_type
			    (TYPE_PRECISION (TREE_TYPE (local.val)), 0),
			    local.val);
	  output_constructor_bitfield (local: &local, bit_offset: outer ? outer->bit_offset : 0);
	}
    }

  /* If we are not at toplevel, save the pending data for our caller.
     Otherwise output the pending data and padding zeros as needed.  */
  if (outer)
    outer->byte = local.byte;
  else
    {
      if (local.byte_buffer_in_use)
	{
	  assemble_integer (GEN_INT (local.byte), size: 1, BITS_PER_UNIT, force: 1);
	  local.total_bytes++;
	}

      /* Pad with zeros up to the requested size.  */
      if ((unsigned HOST_WIDE_INT)local.total_bytes < local.size)
	{
	  assemble_zeros (size: local.size - local.total_bytes);
	  local.total_bytes = local.size;
	}
    }

  return local.total_bytes;
}
5970 | |
5971 | /* Mark DECL as weak. */ |
5972 | |
5973 | static void |
5974 | mark_weak (tree decl) |
5975 | { |
5976 | if (DECL_WEAK (decl)) |
5977 | return; |
5978 | |
5979 | struct symtab_node *n = symtab_node::get (decl); |
5980 | if (n && n->refuse_visibility_changes) |
5981 | error ("%qD declared weak after being used" , decl); |
5982 | DECL_WEAK (decl) = 1; |
5983 | |
5984 | if (DECL_RTL_SET_P (decl) |
5985 | && MEM_P (DECL_RTL (decl)) |
5986 | && XEXP (DECL_RTL (decl), 0) |
5987 | && GET_CODE (XEXP (DECL_RTL (decl), 0)) == SYMBOL_REF) |
5988 | SYMBOL_REF_WEAK (XEXP (DECL_RTL (decl), 0)) = 1; |
5989 | } |
5990 | |
5991 | /* Merge weak status between NEWDECL and OLDDECL. */ |
5992 | |
5993 | void |
5994 | merge_weak (tree newdecl, tree olddecl) |
5995 | { |
5996 | if (DECL_WEAK (newdecl) == DECL_WEAK (olddecl)) |
5997 | { |
5998 | if (DECL_WEAK (newdecl) && TARGET_SUPPORTS_WEAK) |
5999 | { |
6000 | tree *pwd; |
6001 | /* We put the NEWDECL on the weak_decls list at some point |
6002 | and OLDDECL as well. Keep just OLDDECL on the list. */ |
6003 | for (pwd = &weak_decls; *pwd; pwd = &TREE_CHAIN (*pwd)) |
6004 | if (TREE_VALUE (*pwd) == newdecl) |
6005 | { |
6006 | *pwd = TREE_CHAIN (*pwd); |
6007 | break; |
6008 | } |
6009 | } |
6010 | return; |
6011 | } |
6012 | |
6013 | if (DECL_WEAK (newdecl)) |
6014 | { |
6015 | tree wd; |
6016 | |
6017 | /* NEWDECL is weak, but OLDDECL is not. */ |
6018 | |
6019 | /* If we already output the OLDDECL, we're in trouble; we can't |
6020 | go back and make it weak. This should never happen in |
6021 | unit-at-a-time compilation. */ |
6022 | gcc_assert (!TREE_ASM_WRITTEN (olddecl)); |
6023 | |
6024 | /* If we've already generated rtl referencing OLDDECL, we may |
6025 | have done so in a way that will not function properly with |
6026 | a weak symbol. Again in unit-at-a-time this should be |
6027 | impossible. */ |
6028 | gcc_assert (!TREE_USED (olddecl) |
6029 | || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (olddecl))); |
6030 | |
6031 | /* PR 49899: You cannot convert a static function into a weak, public function. */ |
6032 | if (! TREE_PUBLIC (olddecl) && TREE_PUBLIC (newdecl)) |
6033 | error ("weak declaration of %q+D being applied to a already " |
6034 | "existing, static definition" , newdecl); |
6035 | |
6036 | if (TARGET_SUPPORTS_WEAK) |
6037 | { |
6038 | /* We put the NEWDECL on the weak_decls list at some point. |
6039 | Replace it with the OLDDECL. */ |
6040 | for (wd = weak_decls; wd; wd = TREE_CHAIN (wd)) |
6041 | if (TREE_VALUE (wd) == newdecl) |
6042 | { |
6043 | TREE_VALUE (wd) = olddecl; |
6044 | break; |
6045 | } |
6046 | /* We may not find the entry on the list. If NEWDECL is a |
6047 | weak alias, then we will have already called |
6048 | globalize_decl to remove the entry; in that case, we do |
6049 | not need to do anything. */ |
6050 | } |
6051 | |
6052 | /* Make the OLDDECL weak; it's OLDDECL that we'll be keeping. */ |
6053 | mark_weak (decl: olddecl); |
6054 | } |
6055 | else |
6056 | /* OLDDECL was weak, but NEWDECL was not explicitly marked as |
6057 | weak. Just update NEWDECL to indicate that it's weak too. */ |
6058 | mark_weak (decl: newdecl); |
6059 | } |
6060 | |
6061 | /* Declare DECL to be a weak symbol. */ |
6062 | |
6063 | void |
6064 | declare_weak (tree decl) |
6065 | { |
6066 | /* With -fsyntax-only, TREE_ASM_WRITTEN might be set on certain function |
6067 | decls earlier than normally, but as with -fsyntax-only nothing is really |
6068 | emitted, there is no harm in marking it weak later. */ |
6069 | gcc_assert (TREE_CODE (decl) != FUNCTION_DECL |
6070 | || !TREE_ASM_WRITTEN (decl) |
6071 | || flag_syntax_only); |
6072 | if (! TREE_PUBLIC (decl)) |
6073 | { |
6074 | error ("weak declaration of %q+D must be public" , decl); |
6075 | return; |
6076 | } |
6077 | else if (!TARGET_SUPPORTS_WEAK) |
6078 | warning (0, "weak declaration of %q+D not supported" , decl); |
6079 | |
6080 | mark_weak (decl); |
6081 | if (!lookup_attribute (attr_name: "weak" , DECL_ATTRIBUTES (decl))) |
6082 | DECL_ATTRIBUTES (decl) |
6083 | = tree_cons (get_identifier ("weak" ), NULL, DECL_ATTRIBUTES (decl)); |
6084 | } |
6085 | |
static void
weak_finish_1 (tree decl)
{
  /* NAME is only needed by the target macros below, hence the guard.  */
#if defined (ASM_WEAKEN_DECL) || defined (ASM_WEAKEN_LABEL)
  const char *const name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
#endif

  /* Only emit the weakening directive for decls that were actually used.  */
  if (! TREE_USED (decl))
    return;

  /* Emit the directive using the first target mechanism available, in
     order of preference: ASM_WEAKEN_DECL, ASM_WEAKEN_LABEL, and finally
     ASM_OUTPUT_WEAK_ALIAS (which cannot express a plain weakening, so it
     only produces a one-time warning).  */
#ifdef ASM_WEAKEN_DECL
  ASM_WEAKEN_DECL (asm_out_file, decl, name, NULL);
#else
#ifdef ASM_WEAKEN_LABEL
  ASM_WEAKEN_LABEL (asm_out_file, name);
#else
#ifdef ASM_OUTPUT_WEAK_ALIAS
  {
    static bool warn_once = 0;
    if (! warn_once)
      {
	warning (0, "only weak aliases are supported in this configuration" );
	warn_once = 1;
      }
    return;
  }
#endif
#endif
#endif
}
6116 | |
6117 | /* Fiven an assembly name, find the decl it is associated with. */ |
6118 | static tree |
6119 | find_decl (tree target) |
6120 | { |
6121 | symtab_node *node = symtab_node::get_for_asmname (asmname: target); |
6122 | if (node) |
6123 | return node->decl; |
6124 | return NULL_TREE; |
6125 | } |
6126 | |
/* This TREE_LIST contains weakref targets.  Each TREE_PURPOSE is the
   aliasing decl and each TREE_VALUE the (identifier) target it refers
   to; entries are pushed by do_assemble_alias and drained by
   weak_finish.  */

static GTY(()) tree weakref_targets;
6130 | |
/* Emit any pending weak declarations.  First walks the weakref_targets
   list, emitting a weak directive for each still-unreferenced target
   and pruning redundant entries from both pending lists, then emits
   .weak for everything left on weak_decls.  */

void
weak_finish (void)
{
  tree t;

  for (t = weakref_targets; t; t = TREE_CHAIN (t))
    {
      tree alias_decl = TREE_PURPOSE (t);
      tree target = ultimate_transparent_alias_target (alias: &TREE_VALUE (t));

      if (! TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (alias_decl))
	  || TREE_SYMBOL_REFERENCED (target))
	/* Remove alias_decl from the weak list, but leave entries for
	   the target alone.  */
	target = NULL_TREE;
#ifndef ASM_OUTPUT_WEAKREF
      else if (! TREE_SYMBOL_REFERENCED (target))
	{
	  /* Use ASM_WEAKEN_LABEL only if ASM_WEAKEN_DECL is not
	     defined, otherwise we and weak_finish_1 would use
	     different macros.  */
# if defined ASM_WEAKEN_LABEL && ! defined ASM_WEAKEN_DECL
	  ASM_WEAKEN_LABEL (asm_out_file, IDENTIFIER_POINTER (target));
# else
	  tree decl = find_decl (target);

	  /* No decl exists for the target: synthesize an artificial
	     external decl mirroring the alias so weak_finish_1 has
	     something to emit a directive for.  */
	  if (! decl)
	    {
	      decl = build_decl (DECL_SOURCE_LOCATION (alias_decl),
				 TREE_CODE (alias_decl), target,
				 TREE_TYPE (alias_decl));

	      DECL_EXTERNAL (decl) = 1;
	      TREE_PUBLIC (decl) = 1;
	      DECL_ARTIFICIAL (decl) = 1;
	      TREE_NOTHROW (decl) = TREE_NOTHROW (alias_decl);
	      TREE_USED (decl) = 1;
	    }

	  weak_finish_1 (decl);
# endif
	}
#endif

      {
	tree *p;
	tree t2;

	/* Remove the alias and the target from the pending weak list
	   so that we do not emit any .weak directives for the former,
	   nor multiple .weak directives for the latter.  */
	for (p = &weak_decls; (t2 = *p) ; )
	  {
	    if (TREE_VALUE (t2) == alias_decl
		|| target == DECL_ASSEMBLER_NAME (TREE_VALUE (t2)))
	      *p = TREE_CHAIN (t2);
	    else
	      p = &TREE_CHAIN (t2);
	  }

	/* Remove other weakrefs to the same target, to speed things up.  */
	for (p = &TREE_CHAIN (t); (t2 = *p) ; )
	  {
	    if (target == ultimate_transparent_alias_target (alias: &TREE_VALUE (t2)))
	      *p = TREE_CHAIN (t2);
	    else
	      p = &TREE_CHAIN (t2);
	  }
      }
    }

  /* Whatever remains on weak_decls still needs its .weak directive.  */
  for (t = weak_decls; t; t = TREE_CHAIN (t))
    {
      tree decl = TREE_VALUE (t);

      weak_finish_1 (decl);
    }
}
6211 | |
/* Emit the assembly bits to indicate that DECL is globally visible.
   A weak decl gets a weaken directive instead of a plain globalize,
   and is then pruned from the pending weak/weakref lists so no second
   directive is emitted for it later.  */

static void
globalize_decl (tree decl)
{

#if defined (ASM_WEAKEN_LABEL) || defined (ASM_WEAKEN_DECL)
  if (DECL_WEAK (decl))
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
      tree *p, t;

#ifdef ASM_WEAKEN_DECL
      ASM_WEAKEN_DECL (asm_out_file, decl, name, 0);
#else
      ASM_WEAKEN_LABEL (asm_out_file, name);
#endif

      /* Remove this function from the pending weak list so that
	 we do not emit multiple .weak directives for it.  */
      for (p = &weak_decls; (t = *p) ; )
	{
	  if (DECL_ASSEMBLER_NAME (decl) == DECL_ASSEMBLER_NAME (TREE_VALUE (t)))
	    *p = TREE_CHAIN (t);
	  else
	    p = &TREE_CHAIN (t);
	}

      /* Remove weakrefs to the same target from the pending weakref
	 list, for the same reason.  */
      for (p = &weakref_targets; (t = *p) ; )
	{
	  if (DECL_ASSEMBLER_NAME (decl)
	      == ultimate_transparent_alias_target (alias: &TREE_VALUE (t)))
	    *p = TREE_CHAIN (t);
	  else
	    p = &TREE_CHAIN (t);
	}

      return;
    }
#endif

  /* Non-weak case: let the target emit its .globl equivalent.  */
  targetm.asm_out.globalize_decl_name (asm_out_file, decl);
}
6257 | |
/* Pending (decl, target) alias pairs, queued by assemble_alias when it
   is too early to emit the alias directly.  */
vec<alias_pair, va_gc> *alias_pairs;
6259 | |
6260 | /* Output the assembler code for a define (equate) using ASM_OUTPUT_DEF |
6261 | or ASM_OUTPUT_DEF_FROM_DECLS. The function defines the symbol whose |
6262 | tree node is DECL to have the value of the tree node TARGET. */ |
6263 | |
6264 | void |
6265 | do_assemble_alias (tree decl, tree target) |
6266 | { |
6267 | tree id; |
6268 | |
6269 | /* Emulated TLS had better not get this var. */ |
6270 | gcc_assert (!(!targetm.have_tls |
6271 | && VAR_P (decl) |
6272 | && DECL_THREAD_LOCAL_P (decl))); |
6273 | |
6274 | if (TREE_ASM_WRITTEN (decl)) |
6275 | return; |
6276 | |
6277 | id = DECL_ASSEMBLER_NAME (decl); |
6278 | ultimate_transparent_alias_target (alias: &id); |
6279 | ultimate_transparent_alias_target (alias: &target); |
6280 | |
6281 | /* We must force creation of DECL_RTL for debug info generation, even though |
6282 | we don't use it here. */ |
6283 | make_decl_rtl (decl); |
6284 | |
6285 | TREE_ASM_WRITTEN (decl) = 1; |
6286 | TREE_ASM_WRITTEN (DECL_ASSEMBLER_NAME (decl)) = 1; |
6287 | TREE_ASM_WRITTEN (id) = 1; |
6288 | |
6289 | if (lookup_attribute (attr_name: "weakref" , DECL_ATTRIBUTES (decl))) |
6290 | { |
6291 | if (!TREE_SYMBOL_REFERENCED (target)) |
6292 | weakref_targets = tree_cons (decl, target, weakref_targets); |
6293 | |
6294 | #ifdef ASM_OUTPUT_WEAKREF |
6295 | ASM_OUTPUT_WEAKREF (asm_out_file, decl, |
6296 | IDENTIFIER_POINTER (id), |
6297 | IDENTIFIER_POINTER (target)); |
6298 | #else |
6299 | if (!TARGET_SUPPORTS_WEAK) |
6300 | { |
6301 | error_at (DECL_SOURCE_LOCATION (decl), |
6302 | "%qs is not supported in this configuration" , "weakref " ); |
6303 | return; |
6304 | } |
6305 | #endif |
6306 | return; |
6307 | } |
6308 | |
6309 | #ifdef ASM_OUTPUT_DEF |
6310 | tree orig_decl = decl; |
6311 | |
6312 | /* Make name accessible from other files, if appropriate. */ |
6313 | |
6314 | if (TREE_PUBLIC (decl) || TREE_PUBLIC (orig_decl)) |
6315 | { |
6316 | globalize_decl (decl); |
6317 | maybe_assemble_visibility (decl); |
6318 | } |
6319 | if (TREE_CODE (decl) == FUNCTION_DECL |
6320 | && cgraph_node::get (decl)->ifunc_resolver) |
6321 | { |
6322 | #if defined (ASM_OUTPUT_TYPE_DIRECTIVE) |
6323 | if (targetm.has_ifunc_p ()) |
6324 | ASM_OUTPUT_TYPE_DIRECTIVE |
6325 | (asm_out_file, IDENTIFIER_POINTER (id), |
6326 | IFUNC_ASM_TYPE); |
6327 | else |
6328 | #endif |
6329 | error_at (DECL_SOURCE_LOCATION (decl), |
6330 | "%qs is not supported on this target" , "ifunc" ); |
6331 | } |
6332 | |
6333 | # ifdef ASM_OUTPUT_DEF_FROM_DECLS |
6334 | ASM_OUTPUT_DEF_FROM_DECLS (asm_out_file, decl, target); |
6335 | # else |
6336 | ASM_OUTPUT_DEF (asm_out_file, |
6337 | IDENTIFIER_POINTER (id), |
6338 | IDENTIFIER_POINTER (target)); |
6339 | # endif |
6340 | /* If symbol aliases aren't actually supported... */ |
6341 | if (!TARGET_SUPPORTS_ALIASES) |
6342 | /* ..., 'ASM_OUTPUT_DEF{,_FROM_DECLS}' better have raised an error. */ |
6343 | gcc_checking_assert (seen_error ()); |
6344 | #elif defined (ASM_OUTPUT_WEAK_ALIAS) || defined (ASM_WEAKEN_DECL) |
6345 | { |
6346 | const char *name; |
6347 | tree *p, t; |
6348 | |
6349 | name = IDENTIFIER_POINTER (id); |
6350 | # ifdef ASM_WEAKEN_DECL |
6351 | ASM_WEAKEN_DECL (asm_out_file, decl, name, IDENTIFIER_POINTER (target)); |
6352 | # else |
6353 | ASM_OUTPUT_WEAK_ALIAS (asm_out_file, name, IDENTIFIER_POINTER (target)); |
6354 | # endif |
6355 | /* Remove this function from the pending weak list so that |
6356 | we do not emit multiple .weak directives for it. */ |
6357 | for (p = &weak_decls; (t = *p) ; ) |
6358 | if (DECL_ASSEMBLER_NAME (decl) == DECL_ASSEMBLER_NAME (TREE_VALUE (t)) |
6359 | || id == DECL_ASSEMBLER_NAME (TREE_VALUE (t))) |
6360 | *p = TREE_CHAIN (t); |
6361 | else |
6362 | p = &TREE_CHAIN (t); |
6363 | |
6364 | /* Remove weakrefs to the same target from the pending weakref |
6365 | list, for the same reason. */ |
6366 | for (p = &weakref_targets; (t = *p) ; ) |
6367 | { |
6368 | if (id == ultimate_transparent_alias_target (&TREE_VALUE (t))) |
6369 | *p = TREE_CHAIN (t); |
6370 | else |
6371 | p = &TREE_CHAIN (t); |
6372 | } |
6373 | } |
6374 | #endif |
6375 | } |
6376 | |
/* Output .symver directive mapping DECL's assembler name to the
   versioned name TARGET.  Errors out on targets without
   ASM_OUTPUT_SYMVER_DIRECTIVE (i.e. non-ELF).  */

void
do_assemble_symver (tree decl, tree target)
{
  tree id = DECL_ASSEMBLER_NAME (decl);
  ultimate_transparent_alias_target (alias: &id);
  ultimate_transparent_alias_target (alias: &target);
#ifdef ASM_OUTPUT_SYMVER_DIRECTIVE
  ASM_OUTPUT_SYMVER_DIRECTIVE (asm_out_file,
			       IDENTIFIER_POINTER (target),
			       IDENTIFIER_POINTER (id));
#else
  error ("symver is only supported on ELF platforms" );
#endif
}
6393 | |
/* Emit an assembler directive to make the symbol for DECL an alias to
   the symbol for TARGET.  Validates weakref constraints, registers the
   alias in the symbol table, and either emits it immediately or queues
   it on alias_pairs until the target has been emitted.  */

void
assemble_alias (tree decl, tree target)
{
  tree target_decl;

  if (lookup_attribute (attr_name: "weakref" , DECL_ATTRIBUTES (decl)))
    {
      tree alias = DECL_ASSEMBLER_NAME (decl);

      ultimate_transparent_alias_target (alias: &target);

      /* A weakref must neither resolve to itself nor be public.  */
      if (alias == target)
	error ("%qs symbol %q+D ultimately targets itself" , "weakref" , decl);
      if (TREE_PUBLIC (decl))
	error ("%qs symbol %q+D must have static linkage" , "weakref" , decl);
    }
  else if (!TARGET_SUPPORTS_ALIASES)
    {
# if !defined(ASM_OUTPUT_WEAK_ALIAS) && !defined (ASM_WEAKEN_DECL)
      error_at (DECL_SOURCE_LOCATION (decl),
		"alias definitions not supported in this configuration" );
      /* Mark as written so no later pass tries to emit it.  */
      TREE_ASM_WRITTEN (decl) = 1;
      return;
# else
      if (!DECL_WEAK (decl))
	{
	  /* NB: ifunc_resolver isn't set when an error is detected.  */
	  if (TREE_CODE (decl) == FUNCTION_DECL
	      && lookup_attribute (attr_name: "ifunc" , DECL_ATTRIBUTES (decl)))
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "%qs is not supported in this configuration" , "ifunc" );
	  else
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "only weak aliases are supported in this configuration" );
	  TREE_ASM_WRITTEN (decl) = 1;
	  return;
	}
# endif
      gcc_unreachable ();
    }
  TREE_USED (decl) = 1;

  /* Allow aliases to aliases.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    cgraph_node::get_create (decl)->alias = true;
  else
    varpool_node::get_create (decl)->alias = true;

  /* If the target has already been emitted, we don't have to queue the
     alias.  This saves a tad of memory.  */
  if (symtab->global_info_ready)
    target_decl = find_decl (target);
  else
    target_decl= NULL;
  if ((target_decl && TREE_ASM_WRITTEN (target_decl))
      || symtab->state >= EXPANSION)
    do_assemble_alias (decl, target);
  else
    {
      /* Too early: defer the alias until the target is available.  */
      alias_pair p = {.decl: decl, .target: target};
      vec_safe_push (v&: alias_pairs, obj: p);
    }
}
6460 | |
/* Record and output a table of translations from original function
   to its transaction aware clone.  Note that tm_pure functions are
   considered to be their own clone.  */

/* Hash traits for the original->clone tree_map entries; entries are
   dropped from the GC cache when their source decl is no longer
   marked.  */
struct tm_clone_hasher : ggc_cache_ptr_hash<tree_map>
{
  static hashval_t hash (tree_map *m) { return tree_map_hash (m); }
  static bool equal (tree_map *a, tree_map *b) { return tree_map_eq (a, b); }

  static int
  keep_cache_entry (tree_map *&e)
  {
    return ggc_marked_p (e->base.from);
  }
};

/* Lazily-created map from original function decl to its TM clone.  */
static GTY((cache)) hash_table<tm_clone_hasher> *tm_clone_hash;
6478 | |
6479 | void |
6480 | record_tm_clone_pair (tree o, tree n) |
6481 | { |
6482 | struct tree_map **slot, *h; |
6483 | |
6484 | if (tm_clone_hash == NULL) |
6485 | tm_clone_hash = hash_table<tm_clone_hasher>::create_ggc (n: 32); |
6486 | |
6487 | h = ggc_alloc<tree_map> (); |
6488 | h->hash = htab_hash_pointer (o); |
6489 | h->base.from = o; |
6490 | h->to = n; |
6491 | |
6492 | slot = tm_clone_hash->find_slot_with_hash (comparable: h, hash: h->hash, insert: INSERT); |
6493 | *slot = h; |
6494 | } |
6495 | |
6496 | tree |
6497 | get_tm_clone_pair (tree o) |
6498 | { |
6499 | if (tm_clone_hash) |
6500 | { |
6501 | struct tree_map *h, in; |
6502 | |
6503 | in.base.from = o; |
6504 | in.hash = htab_hash_pointer (o); |
6505 | h = tm_clone_hash->find_with_hash (comparable: &in, hash: in.hash); |
6506 | if (h) |
6507 | return h->to; |
6508 | } |
6509 | return NULL_TREE; |
6510 | } |
6511 | |
/* One original/clone pair extracted from tm_clone_hash for sorting.  */
struct tm_alias_pair
{
  unsigned int uid;	/* DECL_UID of FROM; the sort key.  */
  tree from;		/* The original function decl.  */
  tree to;		/* Its transactional clone.  */
};
6518 | |
6519 | |
/* Dump the actual pairs to the .tm_clone_table section.  Each emitted
   entry is a pair of pointers (original, clone); pairs whose original
   or clone has no definition are skipped, and the section switch is
   deferred until the first entry that is actually emitted.  */

static void
dump_tm_clone_pairs (vec<tm_alias_pair> tm_alias_pairs)
{
  unsigned i;
  tm_alias_pair *p;
  bool switched = false;

  FOR_EACH_VEC_ELT (tm_alias_pairs, i, p)
    {
      tree src = p->from;
      tree dst = p->to;
      struct cgraph_node *src_n = cgraph_node::get (decl: src);
      struct cgraph_node *dst_n = cgraph_node::get (decl: dst);

      /* The function ipa_tm_create_version() marks the clone as needed if
	 the original function was needed.  But we also mark the clone as
	 needed if we ever called the clone indirectly through
	 TM_GETTMCLONE.  If neither of these are true, we didn't generate
	 a clone, and we didn't call it indirectly... no sense keeping it
	 in the clone table.  */
      if (!dst_n || !dst_n->definition)
	continue;

      /* This covers the case where we have optimized the original
	 function away, and only access the transactional clone.  */
      if (!src_n || !src_n->definition)
	continue;

      if (!switched)
	{
	  switch_to_section (targetm.asm_out.tm_clone_table_section ());
	  assemble_align (POINTER_SIZE);
	  switched = true;
	}

      assemble_integer (XEXP (DECL_RTL (src), 0),
			POINTER_SIZE_UNITS, POINTER_SIZE, force: 1);
      assemble_integer (XEXP (DECL_RTL (dst), 0),
			POINTER_SIZE_UNITS, POINTER_SIZE, force: 1);
    }
}
6563 | |
6564 | /* Provide a default for the tm_clone_table section. */ |
6565 | |
6566 | section * |
6567 | default_clone_table_section (void) |
6568 | { |
6569 | return get_named_section (NULL, name: ".tm_clone_table" , reloc: 3); |
6570 | } |
6571 | |
6572 | /* Helper comparison function for qsorting by the DECL_UID stored in |
6573 | alias_pair->emitted_diags. */ |
6574 | |
6575 | static int |
6576 | tm_alias_pair_cmp (const void *x, const void *y) |
6577 | { |
6578 | const tm_alias_pair *p1 = (const tm_alias_pair *) x; |
6579 | const tm_alias_pair *p2 = (const tm_alias_pair *) y; |
6580 | if (p1->uid < p2->uid) |
6581 | return -1; |
6582 | if (p1->uid > p2->uid) |
6583 | return 1; |
6584 | return 0; |
6585 | } |
6586 | |
/* Emit the accumulated TM clone pairs and discard the hash table.  */
void
finish_tm_clone_pairs (void)
{
  vec<tm_alias_pair> tm_alias_pairs = vNULL;

  if (tm_clone_hash == NULL)
    return;

  /* We need a deterministic order for the .tm_clone_table, otherwise
     we will get bootstrap comparison failures, so dump the hash table
     to a vector, sort it, and dump the vector.  */

  /* Dump the hashtable to a vector.  */
  tree_map *map;
  hash_table<tm_clone_hasher>::iterator iter;
  FOR_EACH_HASH_TABLE_ELEMENT (*tm_clone_hash, map, tree_map *, iter)
    {
      tm_alias_pair p = {DECL_UID (map->base.from), .from: map->base.from, .to: map->to};
      tm_alias_pairs.safe_push (obj: p);
    }
  /* Sort it.  */
  tm_alias_pairs.qsort (tm_alias_pair_cmp);

  /* Dump it.  */
  dump_tm_clone_pairs (tm_alias_pairs);

  tm_clone_hash->empty ();
  tm_clone_hash = NULL;
  tm_alias_pairs.release ();
}
6617 | |
6618 | |
/* Emit an assembler directive to set symbol for DECL visibility to
   the visibility type VIS, which must not be VISIBILITY_DEFAULT.
   Without GAS .hidden support, warn (for non-artificial decls) that
   the attribute is ignored.  */

void
default_assemble_visibility (tree decl ATTRIBUTE_UNUSED,
			     int vis ATTRIBUTE_UNUSED)
{
#ifdef HAVE_GAS_HIDDEN
  /* Indexed by symbol_visibility; VISIBILITY_DEFAULT never reaches
     here, hence the NULL first slot.  */
  static const char * const visibility_types[] = {
    NULL, "protected" , "hidden" , "internal"
  };

  const char *name, *type;
  tree id;

  id = DECL_ASSEMBLER_NAME (decl);
  ultimate_transparent_alias_target (alias: &id);
  name = IDENTIFIER_POINTER (id);

  type = visibility_types[vis];

  fprintf (stream: asm_out_file, format: "\t.%s\t" , type);
  assemble_name (file: asm_out_file, name);
  fprintf (stream: asm_out_file, format: "\n" );
#else
  if (!DECL_ARTIFICIAL (decl))
    warning (OPT_Wattributes, "visibility attribute not supported "
	     "in this configuration; ignored" );
#endif
}
6649 | |
6650 | /* A helper function to call assemble_visibility when needed for a decl. */ |
6651 | |
6652 | bool |
6653 | maybe_assemble_visibility (tree decl) |
6654 | { |
6655 | enum symbol_visibility vis = DECL_VISIBILITY (decl); |
6656 | if (vis != VISIBILITY_DEFAULT) |
6657 | { |
6658 | targetm.asm_out.assemble_visibility (decl, vis); |
6659 | return true; |
6660 | } |
6661 | else |
6662 | return false; |
6663 | } |
6664 | |
6665 | /* Returns true if the target configuration supports defining public symbols |
6666 | so that one of them will be chosen at link time instead of generating a |
6667 | multiply-defined symbol error, whether through the use of weak symbols or |
6668 | a target-specific mechanism for having duplicates discarded. */ |
6669 | |
6670 | bool |
6671 | supports_one_only (void) |
6672 | { |
6673 | if (SUPPORTS_ONE_ONLY) |
6674 | return true; |
6675 | if (TARGET_SUPPORTS_WEAK) |
6676 | return true; |
6677 | return false; |
6678 | } |
6679 | |
/* Set up DECL as a public symbol that can be defined in multiple
   translation units without generating a linker error.  Uses COMDAT
   groups when available, otherwise falls back to common storage for
   uninitialized variables or weak definitions for everything else.  */

void
make_decl_one_only (tree decl, tree comdat_group)
{
  struct symtab_node *symbol;
  gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));

  TREE_PUBLIC (decl) = 1;

  if (VAR_P (decl))
    symbol = varpool_node::get_create (decl);
  else
    symbol = cgraph_node::get_create (decl);

  if (SUPPORTS_ONE_ONLY)
    {
#ifdef MAKE_DECL_ONE_ONLY
      MAKE_DECL_ONE_ONLY (decl);
#endif
      symbol->set_comdat_group (comdat_group);
    }
  else if (VAR_P (decl)
	   && (DECL_INITIAL (decl) == 0
	       || (!in_lto_p && DECL_INITIAL (decl) == error_mark_node)))
    /* No initializer: the variable can live in common storage.  */
    DECL_COMMON (decl) = 1;
  else
    {
      /* Last resort: a weak definition, which the target must support.  */
      gcc_assert (TARGET_SUPPORTS_WEAK);
      DECL_WEAK (decl) = 1;
    }
}
6713 | |
/* One-time initialization of varasm state: create the section/object
   block/constant hash tables, the shared constant pool, and all the
   standard sections the target's configuration defines.  */
void
init_varasm_once (void)
{
  section_htab = hash_table<section_hasher>::create_ggc (n: 31);
  object_block_htab = hash_table<object_block_hasher>::create_ggc (n: 31);
  const_desc_htab = hash_table<tree_descriptor_hasher>::create_ggc (n: 1009);

  shared_constant_pool = create_constant_pool ();

#ifdef TEXT_SECTION_ASM_OP
  text_section = get_unnamed_section (flags: SECTION_CODE, callback: output_section_asm_op,
				      TEXT_SECTION_ASM_OP);
#endif

#ifdef DATA_SECTION_ASM_OP
  data_section = get_unnamed_section (flags: SECTION_WRITE, callback: output_section_asm_op,
				      DATA_SECTION_ASM_OP);
#endif

#ifdef SDATA_SECTION_ASM_OP
  sdata_section = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
				       SDATA_SECTION_ASM_OP);
#endif

#ifdef READONLY_DATA_SECTION_ASM_OP
  readonly_data_section = get_unnamed_section (flags: 0, callback: output_section_asm_op,
					       READONLY_DATA_SECTION_ASM_OP);
#endif

#ifdef CTORS_SECTION_ASM_OP
  ctors_section = get_unnamed_section (0, output_section_asm_op,
				       CTORS_SECTION_ASM_OP);
#endif

#ifdef DTORS_SECTION_ASM_OP
  dtors_section = get_unnamed_section (0, output_section_asm_op,
				       DTORS_SECTION_ASM_OP);
#endif

#ifdef BSS_SECTION_ASM_OP
  bss_section = get_unnamed_section (flags: SECTION_WRITE | SECTION_BSS,
				     callback: output_section_asm_op,
				     BSS_SECTION_ASM_OP);
#endif

#ifdef SBSS_SECTION_ASM_OP
  sbss_section = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
				      output_section_asm_op,
				      SBSS_SECTION_ASM_OP);
#endif

  /* Noswitch sections: emitted via a callback rather than a section
     switch (common/local-common/tls-common directives).  */
  tls_comm_section = get_noswitch_section (flags: SECTION_WRITE | SECTION_BSS
					   | SECTION_COMMON, callback: emit_tls_common);
  lcomm_section = get_noswitch_section (flags: SECTION_WRITE | SECTION_BSS
					| SECTION_COMMON, callback: emit_local);
  comm_section = get_noswitch_section (flags: SECTION_WRITE | SECTION_BSS
				       | SECTION_COMMON, callback: emit_common);

#if defined ASM_OUTPUT_ALIGNED_BSS
  bss_noswitch_section = get_noswitch_section (flags: SECTION_WRITE | SECTION_BSS,
					       callback: emit_bss);
#endif

  /* Let the target create or override its own sections.  */
  targetm.asm_out.init_sections ();

  if (readonly_data_section == NULL)
    readonly_data_section = text_section;

#ifdef ASM_OUTPUT_EXTERNAL
  pending_assemble_externals_set = new hash_set<tree>;
#endif
}
6786 | |
6787 | /* Determine whether SYMBOL is used in any optimized function. */ |
6788 | |
6789 | static bool |
6790 | have_optimized_refs (struct symtab_node *symbol) |
6791 | { |
6792 | struct ipa_ref *ref; |
6793 | |
6794 | for (int i = 0; symbol->iterate_referring (i, ref); i++) |
6795 | { |
6796 | cgraph_node *cnode = dyn_cast <cgraph_node *> (p: ref->referring); |
6797 | |
6798 | if (cnode && opt_for_fn (cnode->decl, optimize)) |
6799 | return true; |
6800 | } |
6801 | |
6802 | return false; |
6803 | } |
6804 | |
6805 | /* Check if promoting general-dynamic TLS access model to local-dynamic is |
6806 | desirable for DECL. */ |
6807 | |
6808 | static bool |
6809 | optimize_dyn_tls_for_decl_p (const_tree decl) |
6810 | { |
6811 | if (cfun) |
6812 | return optimize; |
6813 | return symtab->state >= IPA && have_optimized_refs (symbol: symtab_node::get (decl)); |
6814 | } |
6815 | |
6816 | |
6817 | enum tls_model |
6818 | decl_default_tls_model (const_tree decl) |
6819 | { |
6820 | enum tls_model kind; |
6821 | bool is_local; |
6822 | |
6823 | is_local = targetm.binds_local_p (decl); |
6824 | if (!flag_shlib) |
6825 | { |
6826 | if (is_local) |
6827 | kind = TLS_MODEL_LOCAL_EXEC; |
6828 | else |
6829 | kind = TLS_MODEL_INITIAL_EXEC; |
6830 | } |
6831 | |
6832 | /* Local dynamic is inefficient when we're not combining the |
6833 | parts of the address. */ |
6834 | else if (is_local && optimize_dyn_tls_for_decl_p (decl)) |
6835 | kind = TLS_MODEL_LOCAL_DYNAMIC; |
6836 | else |
6837 | kind = TLS_MODEL_GLOBAL_DYNAMIC; |
6838 | if (kind < flag_tls_default) |
6839 | kind = flag_tls_default; |
6840 | |
6841 | return kind; |
6842 | } |
6843 | |
/* Select a set of attributes for section NAME based on the properties
   of DECL and whether or not RELOC indicates that DECL's initializer
   might contain runtime relocations.

   We make the section read-only and executable for a function decl,
   read-only for a const data decl, and writable for a non-const data decl.  */

unsigned int
default_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int flags;

  if (decl && TREE_CODE (decl) == FUNCTION_DECL)
    flags = SECTION_CODE;
  else if (decl)
    {
      enum section_category category
	= categorize_decl_for_section (decl, reloc);
      if (decl_readonly_section_1 (category))
	flags = 0;
      else if (category == SECCAT_DATA_REL_RO
	       || category == SECCAT_DATA_REL_RO_LOCAL)
	flags = SECTION_WRITE | SECTION_RELRO;
      else
	flags = SECTION_WRITE;
    }
  else
    {
      /* No decl: infer writability/RELRO from the section name alone.  */
      flags = SECTION_WRITE;
      if (strcmp (s1: name, s2: ".data.rel.ro" ) == 0
	  || strcmp (s1: name, s2: ".data.rel.ro.local" ) == 0)
	flags |= SECTION_RELRO;
    }

  if (decl && DECL_P (decl) && DECL_COMDAT_GROUP (decl))
    flags |= SECTION_LINKONCE;

  if (strcmp (s1: name, s2: ".vtable_map_vars" ) == 0)
    flags |= SECTION_LINKONCE;

  if (decl && VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
    flags |= SECTION_TLS | SECTION_WRITE;

  /* Well-known BSS-style section names, including linkonce variants.  */
  if (strcmp (s1: name, s2: ".bss" ) == 0
      || startswith (str: name, prefix: ".bss." )
      || startswith (str: name, prefix: ".gnu.linkonce.b." )
      || strcmp (s1: name, s2: ".persistent.bss" ) == 0
      || strcmp (s1: name, s2: ".sbss" ) == 0
      || startswith (str: name, prefix: ".sbss." )
      || startswith (str: name, prefix: ".gnu.linkonce.sb." ))
    flags |= SECTION_BSS;

  if (strcmp (s1: name, s2: ".tdata" ) == 0
      || startswith (str: name, prefix: ".tdata." )
      || startswith (str: name, prefix: ".gnu.linkonce.td." ))
    flags |= SECTION_TLS;

  if (strcmp (s1: name, s2: ".tbss" ) == 0
      || startswith (str: name, prefix: ".tbss." )
      || startswith (str: name, prefix: ".gnu.linkonce.tb." ))
    flags |= SECTION_TLS | SECTION_BSS;

  if (strcmp (s1: name, s2: ".noinit" ) == 0)
    flags |= SECTION_WRITE | SECTION_BSS | SECTION_NOTYPE;

  if (strcmp (s1: name, s2: ".persistent" ) == 0)
    flags |= SECTION_WRITE | SECTION_NOTYPE;

  /* Various sections have special ELF types that the assembler will
     assign by default based on the name.  They are neither SHT_PROGBITS
     nor SHT_NOBITS, so when changing sections we don't want to print a
     section type (@progbits or @nobits).  Rather than duplicating the
     assembler's knowledge of what those special name patterns are, just
     let the assembler choose the type if we don't know a specific
     reason to set it to something other than the default.  SHT_PROGBITS
     is the default for sections whose name is not specially known to
     the assembler, so it does no harm to leave the choice to the
     assembler when @progbits is the best thing we know to use.  If
     someone is silly enough to emit code or TLS variables to one of
     these sections, then don't handle them specially.

     default_elf_asm_named_section (below) handles the BSS, TLS, ENTSIZE, and
     LINKONCE cases when NOTYPE is not set, so leave those to its logic.  */
  if (!(flags & (SECTION_CODE | SECTION_BSS | SECTION_TLS | SECTION_ENTSIZE))
      && !(HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE)))
    flags |= SECTION_NOTYPE;

  return flags;
}
6933 | |
6934 | /* Return true if the target supports some form of global BSS, |
6935 | either through bss_noswitch_section, or by selecting a BSS |
6936 | section in TARGET_ASM_SELECT_SECTION. */ |
6937 | |
6938 | bool |
6939 | have_global_bss_p (void) |
6940 | { |
6941 | return bss_noswitch_section || targetm.have_switchable_bss_sections; |
6942 | } |
6943 | |
/* Output assembly to switch to section NAME with attribute FLAGS.
   Four variants for common object file formats.  */

/* Variant for object formats with no named-section support at all;
   reaching here is an internal error since the front end should have
   diagnosed the request already.  */
void
default_no_named_section (const char *name ATTRIBUTE_UNUSED,
			  unsigned int flags ATTRIBUTE_UNUSED,
			  tree decl ATTRIBUTE_UNUSED)
{
  /* Some object formats don't support named sections at all.  The
     front-end should already have flagged this as an error.  */
  gcc_unreachable ();
}
6956 | |
6957 | #ifndef TLS_SECTION_ASM_FLAG |
6958 | #define TLS_SECTION_ASM_FLAG 'T' |
6959 | #endif |
6960 | |
/* ELF variant: emit a .section directive for NAME with FLAGS, including
   the flag-character string, section type (@progbits/@nobits), entity
   size, link-order symbol, and COMDAT group as applicable.  */
void
default_elf_asm_named_section (const char *name, unsigned int flags,
			       tree decl)
{
  char flagchars[11], *f = flagchars;
  unsigned int numeric_value = 0;

  /* If we have already declared this section, we can use an
     abbreviated form to switch back to it -- unless this section is
     part of a COMDAT groups or with SHF_GNU_RETAIN or with SHF_LINK_ORDER,
     in which case GAS requires the full declaration every time.  */
  if (!(HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
      && !(flags & (SECTION_RETAIN | SECTION_LINK_ORDER))
      && (flags & SECTION_DECLARED))
    {
      fprintf (stream: asm_out_file, format: "\t.section\t%s\n" , name);
      return;
    }

  /* If we have a machine specific flag, then use the numeric value to pass
     this on to GAS.  */
  if (targetm.asm_out.elf_flags_numeric (flags, &numeric_value))
    snprintf (s: f, maxlen: sizeof (flagchars), format: "0x%08x" , numeric_value);
  else
    {
      /* Build the GAS flag-character string one character at a time.  */
      if (!(flags & SECTION_DEBUG))
	*f++ = 'a';
#if HAVE_GAS_SECTION_EXCLUDE
      if (flags & SECTION_EXCLUDE)
	*f++ = 'e';
#endif
      if (flags & SECTION_WRITE)
	*f++ = 'w';
      if (flags & SECTION_CODE)
	*f++ = 'x';
      if (flags & SECTION_SMALL)
	*f++ = 's';
      if (flags & SECTION_MERGE)
	*f++ = 'M';
      if (flags & SECTION_STRINGS)
	*f++ = 'S';
      if (flags & SECTION_TLS)
	*f++ = TLS_SECTION_ASM_FLAG;
      if (HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
	*f++ = 'G';
      if (flags & SECTION_RETAIN)
	*f++ = 'R';
      if (flags & SECTION_LINK_ORDER)
	*f++ = 'o';
#ifdef MACH_DEP_SECTION_ASM_FLAG
      if (flags & SECTION_MACH_DEP)
	*f++ = MACH_DEP_SECTION_ASM_FLAG;
#endif
      *f = '\0';
    }

  fprintf (stream: asm_out_file, format: "\t.section\t%s,\"%s\"" , name, flagchars);

  /* default_section_type_flags (above) knows which flags need special
     handling here, and sets NOTYPE when none of these apply so that the
     assembler's logic for default types can apply to user-chosen
     section names.  */
  if (!(flags & SECTION_NOTYPE))
    {
      const char *type;
      const char *format;

      if (flags & SECTION_BSS)
	type = "nobits" ;
      else
	type = "progbits" ;

      format = ",@%s" ;
      /* On platforms that use "@" as the assembly comment character,
	 use "%" instead.  */
      if (strcmp (ASM_COMMENT_START, s2: "@" ) == 0)
	format = ",%%%s" ;
      fprintf (stream: asm_out_file, format: format, type);

      if (flags & SECTION_ENTSIZE)
	fprintf (stream: asm_out_file, format: ",%d" , flags & SECTION_ENTSIZE);
      if (flags & SECTION_LINK_ORDER)
	{
	  /* For now, only section "__patchable_function_entries"
	     adopts flag SECTION_LINK_ORDER, internal label LPFE*
	     was emitted in default_print_patchable_function_entry,
	     just place it here for linked_to section.  */
	  gcc_assert (!strcmp (name, "__patchable_function_entries" ));
	  fprintf (stream: asm_out_file, format: "," );
	  char buf[256];
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LPFE" ,
				       current_function_funcdef_no);
	  assemble_name_raw (file: asm_out_file, name: buf);
	}
      if (HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
	{
	  if (TREE_CODE (decl) == IDENTIFIER_NODE)
	    fprintf (stream: asm_out_file, format: ",%s,comdat" , IDENTIFIER_POINTER (decl));
	  else
	    fprintf (stream: asm_out_file, format: ",%s,comdat" ,
		     IDENTIFIER_POINTER (DECL_COMDAT_GROUP (decl)));
	}
    }

  putc (c: '\n', stream: asm_out_file);
}
7067 | |
7068 | void |
7069 | default_coff_asm_named_section (const char *name, unsigned int flags, |
7070 | tree decl ATTRIBUTE_UNUSED) |
7071 | { |
7072 | char flagchars[8], *f = flagchars; |
7073 | |
7074 | if (flags & SECTION_WRITE) |
7075 | *f++ = 'w'; |
7076 | if (flags & SECTION_CODE) |
7077 | *f++ = 'x'; |
7078 | *f = '\0'; |
7079 | |
7080 | fprintf (stream: asm_out_file, format: "\t.section\t%s,\"%s\"\n" , name, flagchars); |
7081 | } |
7082 | |
7083 | void |
7084 | default_pe_asm_named_section (const char *name, unsigned int flags, |
7085 | tree decl) |
7086 | { |
7087 | default_coff_asm_named_section (name, flags, decl); |
7088 | |
7089 | if (flags & SECTION_LINKONCE) |
7090 | { |
7091 | /* Functions may have been compiled at various levels of |
7092 | optimization so we can't use `same_size' here. |
7093 | Instead, have the linker pick one. */ |
7094 | fprintf (stream: asm_out_file, format: "\t.linkonce %s\n" , |
7095 | (flags & SECTION_CODE ? "discard" : "same_size" )); |
7096 | } |
7097 | } |
7098 | |
7099 | /* The lame default section selector. */ |
7100 | |
7101 | section * |
7102 | default_select_section (tree decl, int reloc, |
7103 | unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED) |
7104 | { |
7105 | if (DECL_P (decl)) |
7106 | { |
7107 | if (decl_readonly_section (decl, reloc)) |
7108 | return readonly_data_section; |
7109 | } |
7110 | else if (TREE_CODE (decl) == CONSTRUCTOR) |
7111 | { |
7112 | if (! ((flag_pic && reloc) |
7113 | || !TREE_READONLY (decl) |
7114 | || !TREE_CONSTANT (decl))) |
7115 | return readonly_data_section; |
7116 | } |
7117 | else if (TREE_CODE (decl) == STRING_CST) |
7118 | return readonly_data_section; |
7119 | else if (! (flag_pic && reloc)) |
7120 | return readonly_data_section; |
7121 | |
7122 | return data_section; |
7123 | } |
7124 | |
/* Classify DECL into one of the SECCAT_* categories used by the default
   section-selection hooks.  RELOC is the relocation mask computed for
   DECL's initializer (bit 0: local relocations, bit 1: global ones).  */

enum section_category
categorize_decl_for_section (const_tree decl, int reloc)
{
  enum section_category ret;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    return SECCAT_TEXT;
  else if (TREE_CODE (decl) == STRING_CST)
    {
      /* ASan-protected strings must not be merged, since protection
	 relies on their exact placement and padding.  */
      if ((flag_sanitize & SANITIZE_ADDRESS)
	  && asan_protect_global (CONST_CAST_TREE (decl)))
	/* or !flag_merge_constants */
	return SECCAT_RODATA;
      else
	return SECCAT_RODATA_MERGE_STR;
    }
  else if (VAR_P (decl))
    {
      tree d = CONST_CAST_TREE (decl);
      if (bss_initializer_p (decl))
	ret = SECCAT_BSS;
      else if (! TREE_READONLY (decl)
	       || (DECL_INITIAL (decl)
		   && ! TREE_CONSTANT (DECL_INITIAL (decl))))
	{
	  /* Here the reloc_rw_mask is not testing whether the section should
	     be read-only or not, but whether the dynamic link will have to
	     do something.  If so, we wish to segregate the data in order to
	     minimize cache misses inside the dynamic linker.  */
	  if (reloc & targetm.asm_out.reloc_rw_mask ())
	    ret = reloc == 1 ? SECCAT_DATA_REL_LOCAL : SECCAT_DATA_REL;
	  else
	    ret = SECCAT_DATA;
	}
      else if (reloc & targetm.asm_out.reloc_rw_mask ())
	ret = reloc == 1 ? SECCAT_DATA_REL_RO_LOCAL : SECCAT_DATA_REL_RO;
      else if (reloc || (flag_merge_constants < 2 && !DECL_MERGEABLE (decl))
	       || ((flag_sanitize & SANITIZE_ADDRESS)
		   /* PR 81697: for architectures that use section anchors we
		      need to ignore DECL_RTL_SET_P (decl) for string constants
		      inside this asan_protect_global call because otherwise
		      we'll wrongly put them into SECCAT_RODATA_MERGE_CONST
		      section, set DECL_RTL (decl) later on and add DECL to
		      protected globals via successive asan_protect_global
		      calls.  In this scenario we'll end up with wrong
		      alignment of these strings at runtime and possible ASan
		      false positives.  */
		   && asan_protect_global (d, use_object_blocks_p ()
					      && use_blocks_for_decl_p (d))))
	/* C and C++ don't allow different variables to share the same
	   location.  -fmerge-all-constants allows even that (at the
	   expense of not conforming).  */
	ret = SECCAT_RODATA;
      else if (DECL_INITIAL (decl)
	       && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST)
	ret = SECCAT_RODATA_MERGE_STR_INIT;
      else
	ret = SECCAT_RODATA_MERGE_CONST;
    }
  else if (TREE_CODE (decl) == CONSTRUCTOR)
    {
      if ((reloc & targetm.asm_out.reloc_rw_mask ())
	  || ! TREE_CONSTANT (decl))
	ret = SECCAT_DATA;
      else
	ret = SECCAT_RODATA;
    }
  else
    ret = SECCAT_RODATA;

  /* There are no read-only thread-local sections.  */
  if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
    {
      /* Note that this would be *just* SECCAT_BSS, except that there's
	 no concept of a read-only thread-local-data section.  */
      if (ret == SECCAT_BSS
	  || DECL_INITIAL (decl) == NULL
	  || (flag_zero_initialized_in_bss
	      && initializer_zerop (DECL_INITIAL (decl))))
	ret = SECCAT_TBSS;
      else
	ret = SECCAT_TDATA;
    }

  /* If the target uses small data sections, select it.  */
  else if (targetm.in_small_data_p (decl))
    {
      if (ret == SECCAT_BSS)
	ret = SECCAT_SBSS;
      else if (targetm.have_srodata_section && ret == SECCAT_RODATA)
	ret = SECCAT_SRODATA;
      else
	ret = SECCAT_SDATA;
    }

  return ret;
}
7222 | |
7223 | static bool |
7224 | decl_readonly_section_1 (enum section_category category) |
7225 | { |
7226 | switch (category) |
7227 | { |
7228 | case SECCAT_RODATA: |
7229 | case SECCAT_RODATA_MERGE_STR: |
7230 | case SECCAT_RODATA_MERGE_STR_INIT: |
7231 | case SECCAT_RODATA_MERGE_CONST: |
7232 | case SECCAT_SRODATA: |
7233 | return true; |
7234 | default: |
7235 | return false; |
7236 | } |
7237 | } |
7238 | |
7239 | bool |
7240 | decl_readonly_section (const_tree decl, int reloc) |
7241 | { |
7242 | return decl_readonly_section_1 (category: categorize_decl_for_section (decl, reloc)); |
7243 | } |
7244 | |
7245 | /* Select a section based on the above categorization. */ |
7246 | |
section *
default_elf_select_section (tree decl, int reloc,
			    unsigned HOST_WIDE_INT align)
{
  const char *sname;

  switch (categorize_decl_for_section (decl, reloc))
    {
    case SECCAT_TEXT:
      /* We're not supposed to be called on FUNCTION_DECLs.  */
      gcc_unreachable ();
    case SECCAT_RODATA:
      return readonly_data_section;
    case SECCAT_RODATA_MERGE_STR:
      return mergeable_string_section (decl, align, 0);
    case SECCAT_RODATA_MERGE_STR_INIT:
      return mergeable_string_section (DECL_INITIAL (decl), align, 0);
    case SECCAT_RODATA_MERGE_CONST:
      return mergeable_constant_section (DECL_MODE (decl), align, 0);
    case SECCAT_SRODATA:
      sname = ".sdata2";
      break;
    case SECCAT_DATA:
      /* "persistent" decls keep their value and get a dedicated
	 section rather than the generic data section.  */
      if (DECL_P (decl) && DECL_PERSISTENT_P (decl))
	{
	  sname = ".persistent";
	  break;
	}
      return data_section;
    case SECCAT_DATA_REL:
      sname = ".data.rel";
      break;
    case SECCAT_DATA_REL_LOCAL:
      sname = ".data.rel.local";
      break;
    case SECCAT_DATA_REL_RO:
      sname = ".data.rel.ro";
      break;
    case SECCAT_DATA_REL_RO_LOCAL:
      sname = ".data.rel.ro.local";
      break;
    case SECCAT_SDATA:
      sname = ".sdata";
      break;
    case SECCAT_TDATA:
      sname = ".tdata";
      break;
    case SECCAT_BSS:
      /* "noinit" decls must not be zero-initialized at startup, so
	 they cannot share .bss.  */
      if (DECL_P (decl) && DECL_NOINIT_P (decl))
	{
	  sname = ".noinit";
	  break;
	}
      if (bss_section)
	return bss_section;
      sname = ".bss";
      break;
    case SECCAT_SBSS:
      sname = ".sbss";
      break;
    case SECCAT_TBSS:
      sname = ".tbss";
      break;
    default:
      gcc_unreachable ();
    }

  /* Categories that fall through here use a named section.  */
  return get_named_section (decl, sname, reloc);
}
7316 | |
7317 | /* Construct a unique section name based on the decl name and the |
7318 | categorization performed above. */ |
7319 | |
void
default_unique_section (tree decl, int reloc)
{
  /* We only need to use .gnu.linkonce if we don't have COMDAT groups.  */
  bool one_only = DECL_ONE_ONLY (decl) && !HAVE_COMDAT_GROUP;
  const char *prefix, *name, *linkonce;
  char *string;
  tree id;

  /* The short prefixes (".t", ".r", ...) are the historical
     .gnu.linkonce spellings; the long ones are the plain ELF section
     name prefixes.  */
  switch (categorize_decl_for_section (decl, reloc))
    {
    case SECCAT_TEXT:
      prefix = one_only ? ".t" : ".text";
      break;
    case SECCAT_RODATA:
    case SECCAT_RODATA_MERGE_STR:
    case SECCAT_RODATA_MERGE_STR_INIT:
    case SECCAT_RODATA_MERGE_CONST:
      prefix = one_only ? ".r" : ".rodata";
      break;
    case SECCAT_SRODATA:
      prefix = one_only ? ".s2" : ".sdata2";
      break;
    case SECCAT_DATA:
      prefix = one_only ? ".d" : ".data";
      /* Persistent decls override the plain data prefix.  */
      if (DECL_P (decl) && DECL_PERSISTENT_P (decl))
	{
	  prefix = one_only ? ".p" : ".persistent";
	  break;
	}
      break;
    case SECCAT_DATA_REL:
      prefix = one_only ? ".d.rel" : ".data.rel";
      break;
    case SECCAT_DATA_REL_LOCAL:
      prefix = one_only ? ".d.rel.local" : ".data.rel.local";
      break;
    case SECCAT_DATA_REL_RO:
      prefix = one_only ? ".d.rel.ro" : ".data.rel.ro";
      break;
    case SECCAT_DATA_REL_RO_LOCAL:
      prefix = one_only ? ".d.rel.ro.local" : ".data.rel.ro.local";
      break;
    case SECCAT_SDATA:
      prefix = one_only ? ".s" : ".sdata";
      break;
    case SECCAT_BSS:
      /* Noinit decls get their own prefix as well.  */
      if (DECL_P (decl) && DECL_NOINIT_P (decl))
	{
	  prefix = one_only ? ".n" : ".noinit";
	  break;
	}
      prefix = one_only ? ".b" : ".bss";
      break;
    case SECCAT_SBSS:
      prefix = one_only ? ".sb" : ".sbss";
      break;
    case SECCAT_TDATA:
      prefix = one_only ? ".td" : ".tdata";
      break;
    case SECCAT_TBSS:
      prefix = one_only ? ".tb" : ".tbss";
      break;
    default:
      gcc_unreachable ();
    }

  /* Use the ultimate target's assembler name, stripped of any
     target-specific encoding, as the unique suffix.  */
  id = DECL_ASSEMBLER_NAME (decl);
  ultimate_transparent_alias_target (&id);
  name = IDENTIFIER_POINTER (id);
  name = targetm.strip_name_encoding (name);

  /* If we're using one_only, then there needs to be a .gnu.linkonce
     prefix to the section name.  */
  linkonce = one_only ? ".gnu.linkonce" : "";

  string = ACONCAT ((linkonce, prefix, ".", name, NULL));

  set_decl_section_name (decl, string);
}
7400 | |
7401 | /* Subroutine of compute_reloc_for_rtx for leaf rtxes. */ |
7402 | |
7403 | static int |
7404 | compute_reloc_for_rtx_1 (const_rtx x) |
7405 | { |
7406 | switch (GET_CODE (x)) |
7407 | { |
7408 | case SYMBOL_REF: |
7409 | return SYMBOL_REF_LOCAL_P (x) ? 1 : 2; |
7410 | case LABEL_REF: |
7411 | return 1; |
7412 | default: |
7413 | return 0; |
7414 | } |
7415 | } |
7416 | |
7417 | /* Like compute_reloc_for_constant, except for an RTX. The return value |
7418 | is a mask for which bit 1 indicates a global relocation, and bit 0 |
7419 | indicates a local relocation. Used by default_select_rtx_section |
7420 | and default_elf_select_rtx_section. */ |
7421 | |
7422 | static int |
7423 | compute_reloc_for_rtx (const_rtx x) |
7424 | { |
7425 | switch (GET_CODE (x)) |
7426 | { |
7427 | case SYMBOL_REF: |
7428 | case LABEL_REF: |
7429 | return compute_reloc_for_rtx_1 (x); |
7430 | |
7431 | case CONST: |
7432 | { |
7433 | int reloc = 0; |
7434 | subrtx_iterator::array_type array; |
7435 | FOR_EACH_SUBRTX (iter, array, x, ALL) |
7436 | reloc |= compute_reloc_for_rtx_1 (x: *iter); |
7437 | return reloc; |
7438 | } |
7439 | |
7440 | default: |
7441 | return 0; |
7442 | } |
7443 | } |
7444 | |
7445 | section * |
7446 | default_select_rtx_section (machine_mode mode ATTRIBUTE_UNUSED, |
7447 | rtx x, |
7448 | unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED) |
7449 | { |
7450 | if (compute_reloc_for_rtx (x) & targetm.asm_out.reloc_rw_mask ()) |
7451 | return data_section; |
7452 | else |
7453 | return readonly_data_section; |
7454 | } |
7455 | |
section *
default_elf_select_rtx_section (machine_mode mode, rtx x,
				unsigned HOST_WIDE_INT align)
{
  int reloc = compute_reloc_for_rtx (x);
  tree decl = nullptr;
  const char *prefix = nullptr;
  int flags = 0;

  /* If it is a private COMDAT function symbol reference, call
     function_rodata_section for the read-only or relocated read-only
     data section associated with function DECL so that the COMDAT
     section will be used for the private COMDAT function symbol.  */
  if (HAVE_COMDAT_GROUP)
    {
      /* Look through a CONST (symbol + offset) wrapper.  */
      if (GET_CODE (x) == CONST
	  && GET_CODE (XEXP (x, 0)) == PLUS
	  && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
	x = XEXP (XEXP (x, 0), 0);

      if (GET_CODE (x) == SYMBOL_REF)
	{
	  decl = SYMBOL_REF_DECL (x);
	  /* Only a private (non-public) COMDAT function qualifies.  */
	  if (decl
	      && (TREE_CODE (decl) != FUNCTION_DECL
		  || !DECL_COMDAT_GROUP (decl)
		  || TREE_PUBLIC (decl)))
	    decl = nullptr;
	}
    }

  /* ??? Handle small data here somehow.  */

  if (reloc & targetm.asm_out.reloc_rw_mask ())
    {
      if (decl)
	{
	  prefix = reloc == 1 ? ".data.rel.ro.local" : ".data.rel.ro";
	  flags = SECTION_WRITE | SECTION_RELRO;
	}
      else if (reloc == 1)
	return get_named_section (NULL, ".data.rel.ro.local", 1);
      else
	return get_named_section (NULL, ".data.rel.ro", 3);
    }

  if (decl)
    {
      /* Build "<prefix>.pool.<comdat-group>" on the stack.  Note that
	 sizeof (".pool.") counts the NUL, which pays for the
	 terminating byte copied along with COMDAT below.  */
      const char *comdat = IDENTIFIER_POINTER (DECL_COMDAT_GROUP (decl));
      if (!prefix)
	prefix = ".rodata";
      size_t prefix_len = strlen (prefix);
      size_t comdat_len = strlen (comdat);
      size_t len = prefix_len + sizeof (".pool.") + comdat_len;
      char *name = XALLOCAVEC (char, len);
      memcpy (name, prefix, prefix_len);
      memcpy (name + prefix_len, ".pool.", sizeof (".pool.") - 1);
      memcpy (name + prefix_len + sizeof (".pool.") - 1, comdat,
	      comdat_len + 1);
      return get_section (name, flags | SECTION_LINKONCE, decl);
    }

  return mergeable_constant_section (mode, align, 0);
}
7520 | |
7521 | /* Set the generally applicable flags on the SYMBOL_REF for EXP. */ |
7522 | |
void
default_encode_section_info (tree decl, rtx rtl, int first ATTRIBUTE_UNUSED)
{
  rtx symbol;
  int flags;

  /* Careful not to prod global register variables.  */
  if (!MEM_P (rtl))
    return;
  symbol = XEXP (rtl, 0);
  if (GET_CODE (symbol) != SYMBOL_REF)
    return;

  /* Preserve any existing block info, recompute everything else.  */
  flags = SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_HAS_BLOCK_INFO;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    flags |= SYMBOL_FLAG_FUNCTION;
  if (targetm.binds_local_p (decl))
    flags |= SYMBOL_FLAG_LOCAL;
  /* A decl is either thread-local or possibly small data, not both.  */
  if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
    flags |= DECL_TLS_MODEL (decl) << SYMBOL_FLAG_TLS_SHIFT;
  else if (targetm.in_small_data_p (decl))
    flags |= SYMBOL_FLAG_SMALL;
  /* ??? Why is DECL_EXTERNAL ever set for non-PUBLIC names?  Without
     being PUBLIC, the thing *must* be defined in this translation unit.
     Prevent this buglet from being propagated into rtl code as well.  */
  if (DECL_P (decl) && DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
    flags |= SYMBOL_FLAG_EXTERNAL;

  SYMBOL_REF_FLAGS (symbol) = flags;
}
7553 | |
7554 | /* By default, we do nothing for encode_section_info, so we need not |
7555 | do anything but discard the '*' marker. */ |
7556 | |
const char *
default_strip_name_encoding (const char *str)
{
  /* A leading '*' marks a name that needs no further assembler
     decoration; skip past it.  */
  if (str[0] == '*')
    return str + 1;
  return str;
}
7562 | |
7563 | #ifdef ASM_OUTPUT_DEF |
7564 | /* The default implementation of TARGET_ASM_OUTPUT_ANCHOR. Define the |
7565 | anchor relative to ".", the current section position. */ |
7566 | |
void
default_asm_output_anchor (rtx symbol)
{
  /* Anchors are emitted as aliases, so alias support is required.  */
  gcc_checking_assert (TARGET_SUPPORTS_ALIASES);

  /* 100 bytes is ample: "*. + " plus the decimal digits of a
     HOST_WIDE_INT.  */
  char buffer[100];

  sprintf (buffer, "*. + " HOST_WIDE_INT_PRINT_DEC,
	   SYMBOL_REF_BLOCK_OFFSET (symbol));
  ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
}
7578 | #endif |
7579 | |
7580 | /* The default implementation of TARGET_USE_ANCHORS_FOR_SYMBOL_P. */ |
7581 | |
bool
default_use_anchors_for_symbol_p (const_rtx symbol)
{
  tree decl;
  section *sect = SYMBOL_REF_BLOCK (symbol)->sect;

  /* This function should only be called with non-zero SYMBOL_REF_BLOCK,
     furthermore get_block_for_section should not create object blocks
     for mergeable sections.  */
  gcc_checking_assert (sect && !(sect->common.flags & SECTION_MERGE));

  /* Don't use anchors for small data sections.  The small data register
     acts as an anchor for such sections.  */
  if (sect->common.flags & SECTION_SMALL)
    return false;

  decl = SYMBOL_REF_DECL (symbol);
  if (decl && DECL_P (decl))
    {
      /* Don't use section anchors for decls that might be defined or
	 usurped by other modules.  */
      if (TREE_PUBLIC (decl) && !decl_binds_to_current_def_p (decl))
	return false;

      /* Don't use section anchors for decls that will be placed in a
	 small data section.  */
      /* ??? Ideally, this check would be redundant with the SECTION_SMALL
	 one above.  The problem is that we only use SECTION_SMALL for
	 sections that should be marked as small in the section directive.  */
      if (targetm.in_small_data_p (decl))
	return false;

      /* Don't use section anchors for decls that won't fit inside a single
	 anchor range to reduce the amount of instructions required to refer
	 to the entire declaration.  */
      if (DECL_SIZE_UNIT (decl) == NULL_TREE
	  || !tree_fits_uhwi_p (DECL_SIZE_UNIT (decl))
	  || (tree_to_uhwi (DECL_SIZE_UNIT (decl))
	      >= (unsigned HOST_WIDE_INT) targetm.max_anchor_offset))
	return false;

    }
  /* Symbols without a decl (e.g. constant-pool entries) are always
     anchorable at this point.  */
  return true;
}
7626 | |
/* Return true when RESOLUTION indicates that the symbol will be bound to
   the definition provided by the current .o file.  */
7629 | |
7630 | static bool |
7631 | resolution_to_local_definition_p (enum ld_plugin_symbol_resolution resolution) |
7632 | { |
7633 | return (resolution == LDPR_PREVAILING_DEF |
7634 | || resolution == LDPR_PREVAILING_DEF_IRONLY_EXP |
7635 | || resolution == LDPR_PREVAILING_DEF_IRONLY); |
7636 | } |
7637 | |
/* Return true when RESOLUTION indicates that the symbol will be bound
   locally within the current executable or DSO.  */
7640 | |
7641 | static bool |
7642 | resolution_local_p (enum ld_plugin_symbol_resolution resolution) |
7643 | { |
7644 | return (resolution == LDPR_PREVAILING_DEF |
7645 | || resolution == LDPR_PREVAILING_DEF_IRONLY |
7646 | || resolution == LDPR_PREVAILING_DEF_IRONLY_EXP |
7647 | || resolution == LDPR_PREEMPTED_REG |
7648 | || resolution == LDPR_PREEMPTED_IR |
7649 | || resolution == LDPR_RESOLVED_IR |
7650 | || resolution == LDPR_RESOLVED_EXEC); |
7651 | } |
7652 | |
7653 | /* COMMON_LOCAL_P is true means that the linker can guarantee that an |
7654 | uninitialized common symbol in the executable will still be defined |
7655 | (through COPY relocation) in the executable. */ |
7656 | |
bool
default_binds_local_p_3 (const_tree exp, bool shlib, bool weak_dominate,
			 bool extern_protected_data, bool common_local_p)
{
  /* A non-decl is an entry in the constant pool.  */
  if (!DECL_P (exp))
    return true;

  /* Weakrefs may not bind locally, even though the weakref itself is always
     static and therefore local.  Similarly, the resolver for ifunc functions
     might resolve to a non-local function.
     FIXME: We can resolve the weakref case more carefully by looking at the
     weakref alias.  */
  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (exp))
      || (!targetm.ifunc_ref_local_ok ()
	  && TREE_CODE (exp) == FUNCTION_DECL
	  && cgraph_node::get (exp)
	  && cgraph_node::get (exp)->ifunc_resolver))
    return false;

  /* Static variables are always local.  */
  if (! TREE_PUBLIC (exp))
    return true;

  /* With a resolution file in hand, take a look into the resolutions.
     We can't just return true for resolved_locally symbols,
     because dynamic linking might overwrite symbols
     in shared libraries.  */
  bool resolved_locally = false;

  /* An uninitialized COMMON variable may later be unified with a
     definition from another translation unit.  */
  bool uninited_common = (DECL_COMMON (exp)
			  && (DECL_INITIAL (exp) == NULL
			      || (!in_lto_p
				  && DECL_INITIAL (exp) == error_mark_node)));

  /* A non-external variable is defined locally only if it isn't
     uninitialized COMMON variable or common_local_p is true.  */
  bool defined_locally = (!DECL_EXTERNAL (exp)
			  && (!uninited_common || common_local_p));
  if (symtab_node *node = symtab_node::get (exp))
    {
      if (node->in_other_partition)
	defined_locally = true;
      if (node->can_be_discarded_p ())
	;  /* Discardable definitions give us no binding information.  */
      else if (resolution_to_local_definition_p (node->resolution))
	defined_locally = resolved_locally = true;
      else if (resolution_local_p (node->resolution))
	resolved_locally = true;
    }
  if (defined_locally && weak_dominate && !shlib)
    resolved_locally = true;

  /* Undefined weak symbols are never defined locally.  */
  if (DECL_WEAK (exp) && !defined_locally)
    return false;

  /* A symbol is local if the user has said explicitly that it will be,
     or if we have a definition for the symbol.  We cannot infer visibility
     for undefined symbols.  */
  if (DECL_VISIBILITY (exp) != VISIBILITY_DEFAULT
      && (TREE_CODE (exp) == FUNCTION_DECL
	  || !extern_protected_data
	  || DECL_VISIBILITY (exp) != VISIBILITY_PROTECTED)
      && (DECL_VISIBILITY_SPECIFIED (exp) || defined_locally))
    return true;

  /* If PIC, then assume that any global name can be overridden by
     symbols resolved from other modules.  */
  if (shlib)
    return false;

  /* Variables defined outside this object might not be local.  */
  if (DECL_EXTERNAL (exp) && !resolved_locally)
    return false;

  /* Non-dominant weak symbols are not defined locally.  */
  if (DECL_WEAK (exp) && !resolved_locally)
    return false;

  /* Uninitialized COMMON variable may be unified with symbols
     resolved from other modules.  */
  if (uninited_common && !resolved_locally)
    return false;

  /* Otherwise we're left with initialized (or non-common) global data
     which is of necessity defined locally.  */
  return true;
}
7746 | |
7747 | /* Assume ELF-ish defaults, since that's pretty much the most liberal |
7748 | wrt cross-module name binding. */ |
7749 | |
7750 | bool |
7751 | default_binds_local_p (const_tree exp) |
7752 | { |
7753 | return default_binds_local_p_3 (exp, flag_shlib != 0, weak_dominate: true, extern_protected_data: false, common_local_p: false); |
7754 | } |
7755 | |
7756 | /* Similar to default_binds_local_p, but common symbol may be local and |
7757 | extern protected data is non-local. */ |
7758 | |
7759 | bool |
7760 | default_binds_local_p_2 (const_tree exp) |
7761 | { |
7762 | return default_binds_local_p_3 (exp, flag_shlib != 0, weak_dominate: true, extern_protected_data: true, |
7763 | common_local_p: !flag_pic); |
7764 | } |
7765 | |
7766 | bool |
7767 | default_binds_local_p_1 (const_tree exp, int shlib) |
7768 | { |
7769 | return default_binds_local_p_3 (exp, shlib: shlib != 0, weak_dominate: false, extern_protected_data: false, common_local_p: false); |
7770 | } |
7771 | |
7772 | /* Return true when references to DECL must bind to current definition in |
7773 | final executable. |
7774 | |
7775 | The condition is usually equivalent to whether the function binds to the |
7776 | current module (shared library or executable), that is to binds_local_p. |
7777 | We use this fact to avoid need for another target hook and implement |
7778 | the logic using binds_local_p and just special cases where |
7779 | decl_binds_to_current_def_p is stronger than binds_local_p. In particular |
7780 | the weak definitions (that can be overwritten at linktime by other |
7781 | definition from different object file) and when resolution info is available |
7782 | we simply use the knowledge passed to us by linker plugin. */ |
bool
decl_binds_to_current_def_p (const_tree decl)
{
  gcc_assert (DECL_P (decl));
  /* A symbol that does not even bind to this module cannot bind to
     the current definition.  */
  if (!targetm.binds_local_p (decl))
    return false;
  /* Static symbols cannot be interposed at all.  */
  if (!TREE_PUBLIC (decl))
    return true;

  /* When resolution is available, just use it.  */
  if (symtab_node *node = symtab_node::get (decl))
    {
      if (node->resolution != LDPR_UNKNOWN
	  && !node->can_be_discarded_p ())
	return resolution_to_local_definition_p (node->resolution);
    }

  /* Otherwise we have to assume the worst for DECL_WEAK (hidden weaks
     binds locally but still can be overwritten), DECL_COMMON (can be merged
     with a non-common definition somewhere in the same module) or
     DECL_EXTERNAL.
     This relies on the fact that binds_local_p behaves as decl_replaceable_p
     for all other declaration types.  */
  if (DECL_WEAK (decl))
    return false;
  if (DECL_COMMON (decl)
      && (DECL_INITIAL (decl) == NULL
	  || (!in_lto_p && DECL_INITIAL (decl) == error_mark_node)))
    return false;
  if (DECL_EXTERNAL (decl))
    return false;
  return true;
}
7816 | |
7817 | /* A replaceable function or variable is one which may be replaced |
7818 | at link-time with an entirely different definition, provided that the |
7819 | replacement has the same type. For example, functions declared |
7820 | with __attribute__((weak)) on most systems are replaceable. |
7821 | If SEMANTIC_INTERPOSITION_P is false allow interposition only on |
7822 | symbols explicitly declared weak. |
7823 | |
7824 | COMDAT functions are not replaceable, since all definitions of the |
7825 | function must be equivalent. It is important that COMDAT functions |
7826 | not be treated as replaceable so that use of C++ template |
7827 | instantiations is not penalized. */ |
7828 | |
7829 | bool |
7830 | decl_replaceable_p (tree decl, bool semantic_interposition_p) |
7831 | { |
7832 | gcc_assert (DECL_P (decl)); |
7833 | if (!TREE_PUBLIC (decl) || DECL_COMDAT (decl)) |
7834 | return false; |
7835 | if (!semantic_interposition_p |
7836 | && !DECL_WEAK (decl)) |
7837 | return false; |
7838 | return !decl_binds_to_current_def_p (decl); |
7839 | } |
7840 | |
7841 | /* Default function to output code that will globalize a label. A |
7842 | target must define GLOBAL_ASM_OP or provide its own function to |
7843 | globalize a label. */ |
7844 | #ifdef GLOBAL_ASM_OP |
7845 | void |
7846 | default_globalize_label (FILE * stream, const char *name) |
7847 | { |
7848 | fputs (GLOBAL_ASM_OP, stream: stream); |
7849 | assemble_name (file: stream, name); |
7850 | putc (c: '\n', stream: stream); |
7851 | } |
7852 | #endif /* GLOBAL_ASM_OP */ |
7853 | |
7854 | /* Default function to output code that will globalize a declaration. */ |
7855 | void |
7856 | default_globalize_decl_name (FILE * stream, tree decl) |
7857 | { |
7858 | const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0); |
7859 | targetm.asm_out.globalize_label (stream, name); |
7860 | } |
7861 | |
7862 | /* Default function to output a label for unwind information. The |
7863 | default is to do nothing. A target that needs nonlocal labels for |
7864 | unwind information must provide its own function to do this. */ |
void
default_emit_unwind_label (FILE * stream ATTRIBUTE_UNUSED,
			   tree decl ATTRIBUTE_UNUSED,
			   int for_eh ATTRIBUTE_UNUSED,
			   int empty ATTRIBUTE_UNUSED)
{
  /* Intentionally empty: most targets need no unwind label.  */
}
7872 | |
/* Default function to output a label to divide up the exception table.
   The default is to do nothing.  A target that needs/wants to divide
   up the table must provide its own function to do this.  */
void
default_emit_except_table_label (FILE * stream ATTRIBUTE_UNUSED)
{
  /* Intentionally empty: most targets do not split the table.  */
}
7880 | |
7881 | /* This is how to output an internal numbered label where PREFIX is |
7882 | the class of label and LABELNO is the number within the class. */ |
7883 | |
void
default_generate_internal_label (char *buf, const char *prefix,
				 unsigned long labelno)
{
  /* BUF is assumed to be large enough for PREFIX, the number and the
     label decoration; callers conventionally over-allocate.  */
  ASM_GENERATE_INTERNAL_LABEL (buf, prefix, labelno);
}
7890 | |
7891 | /* This is how to output an internal numbered label where PREFIX is |
7892 | the class of label and LABELNO is the number within the class. */ |
7893 | |
void
default_internal_label (FILE *stream, const char *prefix,
			unsigned long labelno)
{
  /* 40 bytes of slack is generous room for the label decoration and
     the decimal label number.  */
  char *const buf = (char *) alloca (40 + strlen (prefix));
  ASM_GENERATE_INTERNAL_LABEL (buf, prefix, labelno);
  ASM_OUTPUT_INTERNAL_LABEL (stream, buf);
}
7902 | |
7903 | |
7904 | /* The default implementation of ASM_DECLARE_CONSTANT_NAME. */ |
7905 | |
void
default_asm_declare_constant_name (FILE *file, const char *name,
				   const_tree exp ATTRIBUTE_UNUSED,
				   HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
  /* The default merely emits the label; EXP and SIZE exist for targets
     whose directive needs type/size information.  */
  assemble_label (file, name);
}
7913 | |
7914 | /* This is the default behavior at the beginning of a file. It's |
7915 | controlled by two other target-hook toggles. */ |
7916 | void |
7917 | default_file_start (void) |
7918 | { |
7919 | if (targetm.asm_file_start_app_off |
7920 | && !(flag_verbose_asm || flag_debug_asm || flag_dump_rtl_in_asm)) |
7921 | fputs (ASM_APP_OFF, stream: asm_out_file); |
7922 | |
7923 | if (targetm.asm_file_start_file_directive) |
7924 | { |
7925 | /* LTO produced units have no meaningful main_input_filename. */ |
7926 | if (in_lto_p) |
7927 | output_file_directive (asm_out_file, "<artificial>" ); |
7928 | else |
7929 | output_file_directive (asm_out_file, main_input_filename); |
7930 | } |
7931 | } |
7932 | |
7933 | /* This is a generic routine suitable for use as TARGET_ASM_FILE_END |
7934 | which emits a special section directive used to indicate whether or |
7935 | not this object file needs an executable stack. This is primarily |
7936 | a GNU extension to ELF but could be used on other targets. */ |
7937 | |
7938 | int trampolines_created; |
7939 | |
7940 | void |
7941 | file_end_indicate_exec_stack (void) |
7942 | { |
7943 | unsigned int flags = SECTION_DEBUG; |
7944 | if (trampolines_created) |
7945 | flags |= SECTION_CODE; |
7946 | |
7947 | switch_to_section (get_section (name: ".note.GNU-stack" , flags, NULL)); |
7948 | } |
7949 | |
7950 | /* Emit a special section directive to indicate that this object file |
7951 | was compiled with -fsplit-stack. This is used to let the linker |
7952 | detect calls between split-stack code and non-split-stack code, so |
7953 | that it can modify the split-stack code to allocate a sufficiently |
7954 | large stack. We emit another special section if there are any |
7955 | functions in this file which have the no_split_stack attribute, to |
7956 | prevent the linker from warning about being unable to convert the |
7957 | functions if they call non-split-stack code. */ |
7958 | |
7959 | void |
7960 | file_end_indicate_split_stack (void) |
7961 | { |
7962 | if (flag_split_stack) |
7963 | { |
7964 | switch_to_section (get_section (name: ".note.GNU-split-stack" , flags: SECTION_DEBUG, |
7965 | NULL)); |
7966 | if (saw_no_split_stack) |
7967 | switch_to_section (get_section (name: ".note.GNU-no-split-stack" , |
7968 | flags: SECTION_DEBUG, NULL)); |
7969 | } |
7970 | } |
7971 | |
7972 | /* Output DIRECTIVE (a C string) followed by a newline. This is used as |
7973 | a get_unnamed_section callback. */ |
7974 | |
7975 | void |
7976 | output_section_asm_op (const char *directive) |
7977 | { |
7978 | fprintf (stream: asm_out_file, format: "%s\n" , directive); |
7979 | } |
7980 | |
7981 | /* Emit assembly code to switch to section NEW_SECTION. Do nothing if |
7982 | the current section is NEW_SECTION. */ |
7983 | |
void
switch_to_section (section *new_section, tree decl)
{
  /* DECL, when given, is the declaration that triggered the switch.
     It is consulted only to reconcile the decl's "retain" attribute
     with the section's SECTION_RETAIN flag.  */
  bool retain_p;
  if ((new_section->common.flags & SECTION_NAMED)
      && decl != nullptr
      && DECL_P (decl)
      && ((retain_p = !!lookup_attribute (attr_name: "retain" ,
					  DECL_ATTRIBUTES (decl)))
	  != !!(new_section->common.flags & SECTION_RETAIN)))
    {
      /* If the SECTION_RETAIN bit doesn't match, switch to a new
	 section.  */
      tree used_decl, no_used_decl;

      if (retain_p)
	{
	  /* DECL carries "retain" but the section is not yet marked:
	     mark it; the decl previously recorded for the section is
	     the one lacking the attribute.  */
	  new_section->common.flags |= SECTION_RETAIN;
	  used_decl = decl;
	  no_used_decl = new_section->named.decl;
	}
      else
	{
	  /* The section was marked retained but DECL is not: clear
	     SECTION_RETAIN, and also SECTION_DECLARED so the section
	     directive gets re-emitted with the new flags below.  */
	  new_section->common.flags &= ~(SECTION_RETAIN
					 | SECTION_DECLARED);
	  used_decl = new_section->named.decl;
	  no_used_decl = decl;
	}
      /* Two distinct decls disagreeing about "retain" in the same
	 named section is worth a user-visible warning.  */
      if (no_used_decl != used_decl)
	{
	  warning (OPT_Wattributes,
		   "%+qD without %<retain%> attribute and %qD with "
		   "%<retain%> attribute are placed in a section with "
		   "the same name" , no_used_decl, used_decl);
	  inform (DECL_SOURCE_LOCATION (used_decl),
		  "%qD was declared here" , used_decl);
	}
    }
  else if (in_section == new_section)
    /* Already in the requested section and no flag fixup was needed;
       nothing to emit.  */
    return;

  in_section = new_section;

  switch (SECTION_STYLE (new_section))
    {
    case SECTION_NAMED:
      targetm.asm_out.named_section (new_section->named.name,
				     new_section->named.common.flags,
				     new_section->named.decl);
      break;

    case SECTION_UNNAMED:
      new_section->unnamed.callback (new_section->unnamed.data);
      break;

    case SECTION_NOSWITCH:
      /* Noswitch sections are emitted without ever switching to them;
	 reaching here indicates a caller bug.  */
      gcc_unreachable ();
      break;
    }

  new_section->common.flags |= SECTION_DECLARED;
}
8046 | |
8047 | /* If block symbol SYMBOL has not yet been assigned an offset, place |
8048 | it at the end of its block. */ |
8049 | |
void
place_block_symbol (rtx symbol)
{
  unsigned HOST_WIDE_INT size, mask, offset;
  class constant_descriptor_rtx *desc;
  unsigned int alignment;
  struct object_block *block;
  tree decl;

  gcc_assert (SYMBOL_REF_BLOCK (symbol));
  /* A nonnegative block offset means the symbol was already placed.  */
  if (SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0)
    return;

  /* Work out the symbol's size and alignment.  */
  if (CONSTANT_POOL_ADDRESS_P (symbol))
    {
      /* RTL constant-pool entry: size/alignment come from its
	 descriptor.  */
      desc = SYMBOL_REF_CONSTANT (symbol);
      alignment = desc->align;
      size = GET_MODE_SIZE (mode: desc->mode);
    }
  else if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    {
      /* Tree constant-pool entry: use the decl's layout, padding
	 protected string constants with an ASan red zone.  */
      decl = SYMBOL_REF_DECL (symbol);
      gcc_checking_assert (DECL_IN_CONSTANT_POOL (decl));
      alignment = DECL_ALIGN (decl);
      size = get_constant_size (DECL_INITIAL (decl));
      if ((flag_sanitize & SANITIZE_ADDRESS)
	  && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST
	  && asan_protect_global (DECL_INITIAL (decl)))
	{
	  size += asan_red_zone_size (size);
	  alignment = MAX (alignment,
			   ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
	}
    }
  else
    {
      /* Ordinary variable.  */
      struct symtab_node *snode;
      decl = SYMBOL_REF_DECL (symbol);

      snode = symtab_node::get (decl);
      if (snode->alias)
	{
	  /* An alias occupies no space of its own: place the ultimate
	     target and share its offset.  */
	  rtx target = DECL_RTL (snode->ultimate_alias_target ()->decl);

	  gcc_assert (MEM_P (target)
		      && GET_CODE (XEXP (target, 0)) == SYMBOL_REF
		      && SYMBOL_REF_HAS_BLOCK_INFO_P (XEXP (target, 0)));
	  target = XEXP (target, 0);
	  place_block_symbol (symbol: target);
	  SYMBOL_REF_BLOCK_OFFSET (symbol) = SYMBOL_REF_BLOCK_OFFSET (target);
	  return;
	}
      alignment = get_variable_align (decl);
      size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
      /* ASan-protected globals also get a trailing red zone.  */
      if ((flag_sanitize & SANITIZE_ADDRESS)
	  && asan_protect_global (decl))
	{
	  size += asan_red_zone_size (size);
	  alignment = MAX (alignment,
			   ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
	}
    }

  /* Calculate the object's offset from the start of the block:
     round the block's current size up to the required alignment.  */
  block = SYMBOL_REF_BLOCK (symbol);
  mask = alignment / BITS_PER_UNIT - 1;
  offset = (block->size + mask) & ~mask;
  SYMBOL_REF_BLOCK_OFFSET (symbol) = offset;

  /* Record the block's new alignment and size.  */
  block->alignment = MAX (block->alignment, alignment);
  block->size = offset + size;

  vec_safe_push (v&: block->objects, obj: symbol);
}
8126 | |
8127 | /* Return the anchor that should be used to address byte offset OFFSET |
8128 | from the first object in BLOCK. MODEL is the TLS model used |
8129 | to access it. */ |
8130 | |
rtx
get_section_anchor (struct object_block *block, HOST_WIDE_INT offset,
		    enum tls_model model)
{
  char label[100];
  unsigned int begin, middle, end;
  unsigned HOST_WIDE_INT min_offset, max_offset, range, bias, delta;
  rtx anchor;

  /* Work out the anchor's offset.  Use an offset of 0 for the first
     anchor so that we don't pessimize the case where we take the address
     of a variable at the beginning of the block.  This is particularly
     useful when a block has only one variable assigned to it.

     We try to place anchors RANGE bytes apart, so there can then be
     anchors at +/-RANGE, +/-2 * RANGE, and so on, up to the limits of
     a ptr_mode offset.  With some target settings, the lowest such
     anchor might be out of range for the lowest ptr_mode offset;
     likewise the highest anchor for the highest offset.  Use anchors
     at the extreme ends of the ptr_mode range in such cases.

     All arithmetic uses unsigned integers in order to avoid
     signed overflow.  */
  max_offset = (unsigned HOST_WIDE_INT) targetm.max_anchor_offset;
  min_offset = (unsigned HOST_WIDE_INT) targetm.min_anchor_offset;
  range = max_offset - min_offset + 1;
  /* RANGE == 0 means min..max spans the whole HOST_WIDE_INT space
     (it wrapped); a single anchor at 0 then covers everything.  */
  if (range == 0)
    offset = 0;
  else
    {
      /* BIAS is half the ptr_mode range; it bounds how far an anchor
	 may sit from the block start in either direction.  */
      bias = HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (mode: ptr_mode) - 1);
      if (offset < 0)
	{
	  /* Round towards -BIAS to a multiple of RANGE, measured so
	     that the anchor can still reach OFFSET via max_offset.  */
	  delta = -(unsigned HOST_WIDE_INT) offset + max_offset;
	  delta -= delta % range;
	  if (delta > bias)
	    delta = bias;
	  offset = (HOST_WIDE_INT) (-delta);
	}
      else
	{
	  /* Likewise towards +BIAS-1, reaching OFFSET via min_offset.  */
	  delta = (unsigned HOST_WIDE_INT) offset - min_offset;
	  delta -= delta % range;
	  if (delta > bias - 1)
	    delta = bias - 1;
	  offset = (HOST_WIDE_INT) delta;
	}
    }

  /* Do a binary search to see if there's already an anchor we can use.
     Set BEGIN to the new anchor's index if not.  The anchors vector is
     kept sorted by (block offset, TLS model).  */
  begin = 0;
  end = vec_safe_length (v: block->anchors);
  while (begin != end)
    {
      middle = (end + begin) / 2;
      anchor = (*block->anchors)[middle];
      if (SYMBOL_REF_BLOCK_OFFSET (anchor) > offset)
	end = middle;
      else if (SYMBOL_REF_BLOCK_OFFSET (anchor) < offset)
	begin = middle + 1;
      else if (SYMBOL_REF_TLS_MODEL (anchor) > model)
	end = middle;
      else if (SYMBOL_REF_TLS_MODEL (anchor) < model)
	begin = middle + 1;
      else
	return anchor;
    }

  /* Create a new anchor with a unique label.  */
  ASM_GENERATE_INTERNAL_LABEL (label, "LANCHOR" , anchor_labelno++);
  anchor = create_block_symbol (ggc_strdup (label), block, offset);
  /* Anchors always bind locally; encode the TLS model in the symbol
     flags alongside the anchor marker.  */
  SYMBOL_REF_FLAGS (anchor) |= SYMBOL_FLAG_LOCAL | SYMBOL_FLAG_ANCHOR;
  SYMBOL_REF_FLAGS (anchor) |= model << SYMBOL_FLAG_TLS_SHIFT;

  /* Insert it at index BEGIN, preserving the sort order.  */
  vec_safe_insert (v&: block->anchors, ix: begin, obj: anchor);
  return anchor;
}
8210 | |
8211 | /* Output the objects in BLOCK. */ |
8212 | |
static void
output_object_block (struct object_block *block)
{
  class constant_descriptor_rtx *desc;
  unsigned int i;
  HOST_WIDE_INT offset;
  tree decl;
  rtx symbol;

  /* Nothing was ever placed in this block.  */
  if (!block->objects)
    return;

  /* Switch to the section and make sure that the first byte is
     suitably aligned.  */
  /* Special case VTV comdat sections similar to assemble_variable.  */
  if (SECTION_STYLE (block->sect) == SECTION_NAMED
      && block->sect->named.name
      && (strcmp (s1: block->sect->named.name, s2: ".vtable_map_vars" ) == 0))
    handle_vtv_comdat_section (block->sect, block->sect->named.decl);
  else
    switch_to_section (new_section: block->sect, SYMBOL_REF_DECL ((*block->objects)[0]));

  gcc_checking_assert (!(block->sect->common.flags & SECTION_MERGE));
  assemble_align (align: block->alignment);

  /* Define the values of all anchors relative to the current section
     position.  */
  FOR_EACH_VEC_SAFE_ELT (block->anchors, i, symbol)
    targetm.asm_out.output_anchor (symbol);

  /* Output the objects themselves.  OFFSET tracks how many bytes have
     been emitted so far; each object sits at its precomputed
     SYMBOL_REF_BLOCK_OFFSET.  */
  offset = 0;
  FOR_EACH_VEC_ELT (*block->objects, i, symbol)
    {
      /* Move to the object's offset, padding with zeros if necessary.  */
      assemble_zeros (SYMBOL_REF_BLOCK_OFFSET (symbol) - offset);
      offset = SYMBOL_REF_BLOCK_OFFSET (symbol);
      if (CONSTANT_POOL_ADDRESS_P (symbol))
	{
	  /* RTL constant-pool entry.  */
	  desc = SYMBOL_REF_CONSTANT (symbol);
	  /* Pass 1 for align as we have already laid out everything in the block.
	     So aligning shouldn't be necessary.  */
	  output_constant_pool_1 (desc, align: 1);
	  offset += GET_MODE_SIZE (mode: desc->mode);
	}
      else if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
	{
	  /* Tree constant-pool entry; protected string constants are
	     followed by an ASan red zone (mirrors place_block_symbol's
	     size accounting).  */
	  HOST_WIDE_INT size;
	  decl = SYMBOL_REF_DECL (symbol);
	  assemble_constant_contents (DECL_INITIAL (decl), XSTR (symbol, 0),
				      DECL_ALIGN (decl), merge_strings: false);

	  size = get_constant_size (DECL_INITIAL (decl));
	  offset += size;
	  if ((flag_sanitize & SANITIZE_ADDRESS)
	      && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST
	      && asan_protect_global (DECL_INITIAL (decl)))
	    {
	      size = asan_red_zone_size (size);
	      assemble_zeros (size);
	      offset += size;
	    }
	}
      else
	{
	  /* Ordinary variable, likewise possibly red-zoned.  */
	  HOST_WIDE_INT size;
	  decl = SYMBOL_REF_DECL (symbol);
	  assemble_variable_contents (decl, XSTR (symbol, 0), dont_output_data: false, merge_strings: false);
	  size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
	  offset += size;
	  if ((flag_sanitize & SANITIZE_ADDRESS)
	      && asan_protect_global (decl))
	    {
	      size = asan_red_zone_size (size);
	      assemble_zeros (size);
	      offset += size;
	    }
	}
    }
}
8293 | |
8294 | /* A callback for qsort to compare object_blocks. */ |
8295 | |
8296 | static int |
8297 | output_object_block_compare (const void *x, const void *y) |
8298 | { |
8299 | object_block *p1 = *(object_block * const*)x; |
8300 | object_block *p2 = *(object_block * const*)y; |
8301 | |
8302 | if (p1->sect->common.flags & SECTION_NAMED |
8303 | && !(p2->sect->common.flags & SECTION_NAMED)) |
8304 | return 1; |
8305 | |
8306 | if (!(p1->sect->common.flags & SECTION_NAMED) |
8307 | && p2->sect->common.flags & SECTION_NAMED) |
8308 | return -1; |
8309 | |
8310 | if (p1->sect->common.flags & SECTION_NAMED |
8311 | && p2->sect->common.flags & SECTION_NAMED) |
8312 | return strcmp (s1: p1->sect->named.name, s2: p2->sect->named.name); |
8313 | |
8314 | unsigned f1 = p1->sect->common.flags; |
8315 | unsigned f2 = p2->sect->common.flags; |
8316 | if (f1 == f2) |
8317 | return 0; |
8318 | return f1 < f2 ? -1 : 1; |
8319 | } |
8320 | |
8321 | /* Output the definitions of all object_blocks. */ |
8322 | |
8323 | void |
8324 | output_object_blocks (void) |
8325 | { |
8326 | vec<object_block *, va_heap> v; |
8327 | v.create (nelems: object_block_htab->elements ()); |
8328 | object_block *obj; |
8329 | hash_table<object_block_hasher>::iterator hi; |
8330 | |
8331 | FOR_EACH_HASH_TABLE_ELEMENT (*object_block_htab, obj, object_block *, hi) |
8332 | v.quick_push (obj); |
8333 | |
8334 | /* Sort them in order to output them in a deterministic manner, |
8335 | otherwise we may get .rodata sections in different orders with |
8336 | and without -g. */ |
8337 | v.qsort (output_object_block_compare); |
8338 | unsigned i; |
8339 | FOR_EACH_VEC_ELT (v, i, obj) |
8340 | output_object_block (block: obj); |
8341 | |
8342 | v.release (); |
8343 | } |
8344 | |
8345 | /* This function provides a possible implementation of the |
8346 | TARGET_ASM_RECORD_GCC_SWITCHES target hook for ELF targets. When triggered |
8347 | by -frecord-gcc-switches it creates a new mergeable, string section in the |
8348 | assembler output file called TARGET_ASM_RECORD_GCC_SWITCHES_SECTION which |
8349 | contains the switches in ASCII format. |
8350 | |
8351 | FIXME: This code does not correctly handle double quote characters |
8352 | that appear inside strings, (it strips them rather than preserving them). |
8353 | FIXME: ASM_OUTPUT_ASCII, as defined in config/elfos.h will not emit NUL |
8354 | characters - instead it treats them as sub-string separators. Since |
8355 | we want to emit NUL strings terminators into the object file we have to use |
8356 | ASM_OUTPUT_SKIP. */ |
8357 | |
void
elf_record_gcc_switches (const char *options)
{
  /* A mergeable string section with entity size 1 (SECTION_ENTSIZE & 1
     encodes the 1-byte entsize in the flags word).  */
  section *sec = get_section (name: targetm.asm_out.record_gcc_switches_section,
			      flags: SECTION_DEBUG | SECTION_MERGE
			      | SECTION_STRINGS | (SECTION_ENTSIZE & 1), NULL);
  switch_to_section (new_section: sec);
  /* strlen + 1 so the NUL terminator is emitted too (see the FIXME
     above about ASM_OUTPUT_ASCII and NUL handling).  */
  ASM_OUTPUT_ASCII (asm_out_file, options, strlen (options) + 1);
}
8367 | |
8368 | /* Emit text to declare externally defined symbols. It is needed to |
8369 | properly support non-default visibility. */ |
8370 | void |
8371 | default_elf_asm_output_external (FILE *file ATTRIBUTE_UNUSED, |
8372 | tree decl, |
8373 | const char *name ATTRIBUTE_UNUSED) |
8374 | { |
8375 | /* We output the name if and only if TREE_SYMBOL_REFERENCED is |
8376 | set in order to avoid putting out names that are never really |
8377 | used. Always output visibility specified in the source. */ |
8378 | if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)) |
8379 | && (DECL_VISIBILITY_SPECIFIED (decl) |
8380 | || targetm.binds_local_p (decl))) |
8381 | maybe_assemble_visibility (decl); |
8382 | } |
8383 | |
8384 | /* The default hook for TARGET_ASM_OUTPUT_SOURCE_FILENAME. */ |
8385 | |
void
default_asm_output_source_filename (FILE *file, const char *name)
{
#ifdef ASM_OUTPUT_SOURCE_FILENAME
  /* Let the target macro format the directive when one is defined.  */
  ASM_OUTPUT_SOURCE_FILENAME (file, name);
#else
  /* Otherwise emit a generic quoted .file directive.  */
  fprintf (stream: file, format: "\t.file\t" );
  output_quoted_string (file, name);
  putc (c: '\n', stream: file);
#endif
}
8397 | |
8398 | /* Output a file name in the form wanted by System V. */ |
8399 | |
8400 | void |
8401 | output_file_directive (FILE *asm_file, const char *input_name) |
8402 | { |
8403 | int len; |
8404 | const char *na; |
8405 | |
8406 | if (input_name == NULL) |
8407 | input_name = "<stdin>" ; |
8408 | else |
8409 | input_name = remap_debug_filename (input_name); |
8410 | |
8411 | len = strlen (s: input_name); |
8412 | na = input_name + len; |
8413 | |
8414 | /* NA gets INPUT_NAME sans directory names. */ |
8415 | while (na > input_name) |
8416 | { |
8417 | if (IS_DIR_SEPARATOR (na[-1])) |
8418 | break; |
8419 | na--; |
8420 | } |
8421 | |
8422 | targetm.asm_out.output_source_filename (asm_file, na); |
8423 | } |
8424 | |
8425 | /* Create a DEBUG_EXPR_DECL / DEBUG_EXPR pair from RTL expression |
8426 | EXP. */ |
rtx
make_debug_expr_from_rtl (const_rtx exp)
{
  tree ddecl = make_node (DEBUG_EXPR_DECL), type;
  machine_mode mode = GET_MODE (exp);
  rtx dval;

  DECL_ARTIFICIAL (ddecl) = 1;
  /* Try to recover a source-level type for EXP: the expression behind
     a REG or the memory expression behind a MEM.  */
  if (REG_P (exp) && REG_EXPR (exp))
    type = TREE_TYPE (REG_EXPR (exp));
  else if (MEM_P (exp) && MEM_EXPR (exp))
    type = TREE_TYPE (MEM_EXPR (exp));
  else
    type = NULL_TREE;
  /* Use the recovered type only when it matches EXP's machine mode;
     otherwise ask the frontend for a (unsigned) type of that mode.  */
  if (type && TYPE_MODE (type) == mode)
    TREE_TYPE (ddecl) = type;
  else
    TREE_TYPE (ddecl) = lang_hooks.types.type_for_mode (mode, 1);
  SET_DECL_MODE (ddecl, mode);
  /* Link the DEBUG_EXPR rtx and its decl to each other.  */
  dval = gen_rtx_DEBUG_EXPR (mode);
  DEBUG_EXPR_TREE_DECL (dval) = ddecl;
  SET_DECL_RTL (ddecl, dval);
  return dval;
}
8451 | |
8452 | #ifdef ELF_ASCII_ESCAPES |
8453 | /* Default ASM_OUTPUT_LIMITED_STRING for ELF targets. */ |
8454 | |
8455 | void |
8456 | default_elf_asm_output_limited_string (FILE *f, const char *s) |
8457 | { |
8458 | int escape; |
8459 | unsigned char c; |
8460 | |
8461 | fputs (STRING_ASM_OP, stream: f); |
8462 | putc (c: '"', stream: f); |
8463 | while (*s != '\0') |
8464 | { |
8465 | c = *s; |
8466 | escape = ELF_ASCII_ESCAPES[c]; |
8467 | switch (escape) |
8468 | { |
8469 | case 0: |
8470 | putc (c: c, stream: f); |
8471 | break; |
8472 | case 1: |
8473 | putc (c: '\\', stream: f); |
8474 | putc (c: '0'+((c>>6)&7), stream: f); |
8475 | putc (c: '0'+((c>>3)&7), stream: f); |
8476 | putc (c: '0'+(c&7), stream: f); |
8477 | break; |
8478 | default: |
8479 | putc (c: '\\', stream: f); |
8480 | putc (c: escape, stream: f); |
8481 | break; |
8482 | } |
8483 | s++; |
8484 | } |
8485 | putc (c: '\"', stream: f); |
8486 | putc (c: '\n', stream: f); |
8487 | } |
8488 | |
8489 | /* Default ASM_OUTPUT_ASCII for ELF targets. */ |
8490 | |
void
default_elf_asm_output_ascii (FILE *f, const char *s, unsigned int len)
{
  const char *limit = s + len;
  /* Cache of the furthest NUL (or LIMIT) found so far, so each byte is
     scanned at most once across iterations.  */
  const char *last_null = NULL;
  /* Bytes emitted on the current .ascii line; flushed at 60.  */
  unsigned bytes_in_chunk = 0;
  unsigned char c;
  int escape;

  for (; s < limit; s++)
    {
      const char *p;

      /* Keep .ascii lines to a readable width.  */
      if (bytes_in_chunk >= 60)
	{
	  putc (c: '\"', stream: f);
	  putc (c: '\n', stream: f);
	  bytes_in_chunk = 0;
	}

      /* Find the next NUL at or after S (reusing the cached result
	 when S hasn't passed it yet).  */
      if (s > last_null)
	{
	  for (p = s; p < limit && *p != '\0'; p++)
	    continue;
	  last_null = p;
	}
      else
	p = last_null;

      /* A short NUL-terminated run can be emitted as one .string
	 directive (which supplies the terminator itself).  */
      if (p < limit && (p - s) <= (long) ELF_STRING_LIMIT)
	{
	  if (bytes_in_chunk > 0)
	    {
	      putc (c: '\"', stream: f);
	      putc (c: '\n', stream: f);
	      bytes_in_chunk = 0;
	    }

	  default_elf_asm_output_limited_string (f, s);
	  /* Skip to the NUL; the loop increment then moves past it.  */
	  s = p;
	}
      else
	{
	  /* Otherwise append the byte to the current .ascii chunk,
	     escaped per the ELF_ASCII_ESCAPES table (0 = literal,
	     1 = octal escape, else the escape character itself).  */
	  if (bytes_in_chunk == 0)
	    fputs (ASCII_DATA_ASM_OP "\"" , stream: f);

	  c = *s;
	  escape = ELF_ASCII_ESCAPES[c];
	  switch (escape)
	    {
	    case 0:
	      putc (c: c, stream: f);
	      bytes_in_chunk++;
	      break;
	    case 1:
	      putc (c: '\\', stream: f);
	      putc (c: '0'+((c>>6)&7), stream: f);
	      putc (c: '0'+((c>>3)&7), stream: f);
	      putc (c: '0'+(c&7), stream: f);
	      bytes_in_chunk += 4;
	      break;
	    default:
	      putc (c: '\\', stream: f);
	      putc (c: escape, stream: f);
	      bytes_in_chunk += 2;
	      break;
	    }

	}
    }

  /* Close any unfinished .ascii line.  */
  if (bytes_in_chunk > 0)
    {
      putc (c: '\"', stream: f);
      putc (c: '\n', stream: f);
    }
}
8568 | #endif |
8569 | |
/* Cached default-priority sections, created on first use.  */
static GTY(()) section *elf_init_array_section;
static GTY(()) section *elf_fini_array_section;

/* Return the .init_array/.fini_array section to use for PRIORITY
   (CONSTRUCTOR_P selects which).  Non-default priorities get their own
   ".init_array.NNNNN"-style section; the default priority shares the
   cached plain section.  */
static section *
get_elf_initfini_array_priority_section (int priority,
					 bool constructor_p)
{
  section *sec;
  if (priority != DEFAULT_INIT_PRIORITY)
    {
      /* ".init_array" (11 chars) + '.' + 5 digits + NUL = 18 bytes.  */
      char buf[18];
      sprintf (s: buf, format: "%s.%.5u" ,
	       constructor_p ? ".init_array" : ".fini_array" ,
	       priority);
      sec = get_section (name: buf, flags: SECTION_WRITE | SECTION_NOTYPE, NULL_TREE);
    }
  else
    {
      if (constructor_p)
	{
	  if (elf_init_array_section == NULL)
	    elf_init_array_section
	      = get_section (name: ".init_array" ,
			     flags: SECTION_WRITE | SECTION_NOTYPE, NULL_TREE);
	  sec = elf_init_array_section;
	}
      else
	{
	  if (elf_fini_array_section == NULL)
	    elf_fini_array_section
	      = get_section (name: ".fini_array" ,
			     flags: SECTION_WRITE | SECTION_NOTYPE, NULL_TREE);
	  sec = elf_fini_array_section;
	}
    }
  return sec;
}
8607 | |
8608 | /* Use .init_array section for constructors. */ |
8609 | |
8610 | void |
8611 | default_elf_init_array_asm_out_constructor (rtx symbol, int priority) |
8612 | { |
8613 | section *sec = get_elf_initfini_array_priority_section (priority, |
8614 | constructor_p: true); |
8615 | assemble_addr_to_section (symbol, sec); |
8616 | } |
8617 | |
8618 | /* Use .fini_array section for destructors. */ |
8619 | |
8620 | void |
8621 | default_elf_fini_array_asm_out_destructor (rtx symbol, int priority) |
8622 | { |
8623 | section *sec = get_elf_initfini_array_priority_section (priority, |
8624 | constructor_p: false); |
8625 | assemble_addr_to_section (symbol, sec); |
8626 | } |
8627 | |
8628 | /* Default TARGET_ASM_OUTPUT_IDENT hook. |
8629 | |
8630 | This is a bit of a cheat. The real default is a no-op, but this |
8631 | hook is the default for all targets with a .ident directive. */ |
8632 | |
void
default_asm_output_ident_directive (const char *ident_str)
{
  const char *ident_asm_op = "\t.ident\t" ;

  /* If we are still in the front end, do not write out the string
     to asm_out_file.  Instead, add a fake top-level asm statement.
     This allows the front ends to use this hook without actually
     writing to asm_out_file, to handle #ident or Pragma Ident.  */
  if (symtab->state == PARSING)
    {
      char *buf = ACONCAT ((ident_asm_op, "\"" , ident_str, "\"\n" , NULL));
      symtab->finalize_toplevel_asm (asm_str: build_string (strlen (s: buf), buf));
    }
  else
    /* Past parsing: emit the quoted directive directly.  */
    fprintf (stream: asm_out_file, format: "%s\"%s\"\n" , ident_asm_op, ident_str);
}
8650 | |
8651 | /* Switch to a COMDAT section with COMDAT name of decl. |
8652 | |
8653 | FIXME: resolve_unique_section needs to deal better with |
8654 | decls with both DECL_SECTION_NAME and DECL_ONE_ONLY. Once |
8655 | that is fixed, this if-else statement can be replaced with |
8656 | a single call to "switch_to_section (sect)". */ |
8657 | |
void
switch_to_comdat_section (section *sect, tree decl)
{
#if defined (OBJECT_FORMAT_ELF)
  /* On ELF, re-emit the section directive with SECTION_LINKONCE so the
     target hook produces a comdat group keyed on DECL.  */
  targetm.asm_out.named_section (sect->named.name,
				 sect->named.common.flags
				 | SECTION_LINKONCE,
				 decl);
  in_section = sect;
#else
  /* Neither OBJECT_FORMAT_PE nor OBJECT_FORMAT_COFF is set here,
     therefore the following runtime check is used instead.
     When the target is PE or COFF a comdat group section
     is created, e.g. .vtable_map_vars$foo.  The linker places
     everything in .vtable_map_vars at the end.

     A fix could be made in
     gcc/config/i386/winnt.cc: i386_pe_unique_section.  */
  if (TARGET_PECOFF)
    {
      char *name;

      /* DECL may be the comdat key itself (an IDENTIFIER_NODE) or a
	 decl whose comdat group supplies the key.  */
      if (TREE_CODE (decl) == IDENTIFIER_NODE)
	name = ACONCAT ((sect->named.name, "$" ,
			 IDENTIFIER_POINTER (decl), NULL));
      else
	name = ACONCAT ((sect->named.name, "$" ,
			 IDENTIFIER_POINTER (DECL_COMDAT_GROUP (decl)),
			 NULL));

      targetm.asm_out.named_section (name,
				     sect->named.common.flags
				     | SECTION_LINKONCE,
				     decl);
      in_section = sect;
    }
  else
    switch_to_section (sect);
#endif
}
8698 | |
8699 | /* This function ensures that vtable_map variables are not only |
8700 | in the comdat section, but that each variable has its own unique |
8701 | comdat name. Without this the variables end up in the same section |
8702 | with a single comdat name. */ |
8703 | |
8704 | static void |
8705 | handle_vtv_comdat_section (section *sect, const_tree decl ATTRIBUTE_UNUSED) |
8706 | { |
8707 | switch_to_comdat_section(sect, DECL_NAME (decl)); |
8708 | } |
8709 | |
8710 | #include "gt-varasm.h" |
8711 | |