1 /* Output variables, constants and external declarations, for GNU compiler.
2 Copyright (C) 1987-2019 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* This file handles generation of all the assembler code
22 *except* the instructions of a function.
23 This includes declarations of variables and their initial values.
24
25 We also output the assembler code for constants stored in memory
26 and are responsible for combining constants with the same value. */
27
28 #include "config.h"
29 #include "system.h"
30 #include "coretypes.h"
31 #include "backend.h"
32 #include "target.h"
33 #include "rtl.h"
34 #include "tree.h"
35 #include "predict.h"
36 #include "memmodel.h"
37 #include "tm_p.h"
38 #include "stringpool.h"
39 #include "regs.h"
40 #include "emit-rtl.h"
41 #include "cgraph.h"
42 #include "diagnostic-core.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "varasm.h"
46 #include "flags.h"
47 #include "stmt.h"
48 #include "expr.h"
49 #include "expmed.h"
50 #include "output.h"
51 #include "langhooks.h"
52 #include "debug.h"
53 #include "common/common-target.h"
54 #include "stringpool.h"
55 #include "attribs.h"
56 #include "asan.h"
57 #include "rtl-iter.h"
58 #include "file-prefix-map.h" /* remap_debug_filename() */
59
60 #ifdef XCOFF_DEBUGGING_INFO
61 #include "xcoffout.h" /* Needed for external data declarations. */
62 #endif
63
64 /* The (assembler) name of the first globally-visible object output. */
65 extern GTY(()) const char *first_global_object_name;
66 extern GTY(()) const char *weak_global_object_name;
67
68 const char *first_global_object_name;
69 const char *weak_global_object_name;
70
71 struct addr_const;
72 struct constant_descriptor_rtx;
73 struct rtx_constant_pool;
74
75 #define n_deferred_constants (crtl->varasm.deferred_constants)
76
77 /* Number for making the label on the next
78 constant that is stored in memory. */
79
80 static GTY(()) int const_labelno;
81
82 /* Carry information from ASM_DECLARE_OBJECT_NAME
83 to ASM_FINISH_DECLARE_OBJECT. */
84
85 int size_directive_output;
86
87 /* The last decl for which assemble_variable was called,
88 if it did ASM_DECLARE_OBJECT_NAME.
89 If the last call to assemble_variable didn't do that,
90 this holds 0. */
91
92 tree last_assemble_variable_decl;
93
94 /* The following global variable indicates if the first basic block
95 in a function belongs to the cold partition or not. */
96
97 bool first_function_block_is_cold;
98
99 /* Whether we saw any functions with no_split_stack. */
100
101 static bool saw_no_split_stack;
102
103 static const char *strip_reg_name (const char *);
104 static int contains_pointers_p (tree);
105 #ifdef ASM_OUTPUT_EXTERNAL
106 static bool incorporeal_function_p (tree);
107 #endif
108 static void decode_addr_const (tree, struct addr_const *);
109 static hashval_t const_hash_1 (const tree);
110 static int compare_constant (const tree, const tree);
111 static void output_constant_def_contents (rtx);
112 static void output_addressed_constants (tree);
113 static unsigned HOST_WIDE_INT output_constant (tree, unsigned HOST_WIDE_INT,
114 unsigned int, bool, bool);
115 static void globalize_decl (tree);
116 static bool decl_readonly_section_1 (enum section_category);
117 #ifdef BSS_SECTION_ASM_OP
118 #ifdef ASM_OUTPUT_ALIGNED_BSS
119 static void asm_output_aligned_bss (FILE *, tree, const char *,
120 unsigned HOST_WIDE_INT, int)
121 ATTRIBUTE_UNUSED;
122 #endif
123 #endif /* BSS_SECTION_ASM_OP */
124 static void mark_weak (tree);
125 static void output_constant_pool (const char *, tree);
126 static void handle_vtv_comdat_section (section *, const_tree);
127 \f
128 /* Well-known sections, each one associated with some sort of *_ASM_OP. */
129 section *text_section;
130 section *data_section;
131 section *readonly_data_section;
132 section *sdata_section;
133 section *ctors_section;
134 section *dtors_section;
135 section *bss_section;
136 section *sbss_section;
137
138 /* Various forms of common section. All are guaranteed to be nonnull. */
139 section *tls_comm_section;
140 section *comm_section;
141 section *lcomm_section;
142
143 /* A SECTION_NOSWITCH section used for declaring global BSS variables.
144 May be null. */
145 section *bss_noswitch_section;
146
147 /* The section that holds the main exception table, when known. The section
148 is set either by the target's init_sections hook or by the first call to
149 switch_to_exception_section. */
150 section *exception_section;
151
152 /* The section that holds the DWARF2 frame unwind information, when known.
153 The section is set either by the target's init_sections hook or by the
154 first call to switch_to_eh_frame_section. */
155 section *eh_frame_section;
156
157 /* asm_out_file's current section. This is NULL if no section has yet
158 been selected or if we lose track of what the current section is. */
159 section *in_section;
160
161 /* True if code for the current function is currently being directed
162 at the cold section. */
163 bool in_cold_section_p;
164
165 /* The following global holds the "function name" for the code in the
166 cold section of a function, if hot/cold function splitting is enabled
167 and there was actually code that went into the cold section. A
168 pseudo function name is needed for the cold section of code for some
169 debugging tools that perform symbolization. */
170 tree cold_function_name = NULL_TREE;
171
172 /* A linked list of all the unnamed sections. */
173 static GTY(()) section *unnamed_sections;
174
175 /* Return a nonzero value if DECL has a section attribute. */
176 #define IN_NAMED_SECTION(DECL) \
177 (VAR_OR_FUNCTION_DECL_P (DECL) && DECL_SECTION_NAME (DECL) != NULL)
178
179 struct section_hasher : ggc_ptr_hash<section>
180 {
181 typedef const char *compare_type;
182
183 static hashval_t hash (section *);
184 static bool equal (section *, const char *);
185 };
186
187 /* Hash table of named sections. */
188 static GTY(()) hash_table<section_hasher> *section_htab;
189
190 struct object_block_hasher : ggc_ptr_hash<object_block>
191 {
192 typedef const section *compare_type;
193
194 static hashval_t hash (object_block *);
195 static bool equal (object_block *, const section *);
196 };
197
198 /* A table of object_blocks, indexed by section. */
199 static GTY(()) hash_table<object_block_hasher> *object_block_htab;
200
201 /* The next number to use for internal anchor labels. */
202 static GTY(()) int anchor_labelno;
203
204 /* A pool of constants that can be shared between functions. */
205 static GTY(()) struct rtx_constant_pool *shared_constant_pool;
206
207 /* Helper routines for maintaining section_htab. */
208
209 bool
210 section_hasher::equal (section *old, const char *new_name)
211 {
212 return strcmp (old->named.name, new_name) == 0;
213 }
214
215 hashval_t
216 section_hasher::hash (section *old)
217 {
218 return htab_hash_string (old->named.name);
219 }
220
221 /* Return a hash value for section SECT. */
222
223 static hashval_t
224 hash_section (section *sect)
225 {
226 if (sect->common.flags & SECTION_NAMED)
227 return htab_hash_string (sect->named.name);
228 return sect->common.flags & ~SECTION_DECLARED;
229 }
230
231 /* Helper routines for maintaining object_block_htab. */
232
233 inline bool
234 object_block_hasher::equal (object_block *old, const section *new_section)
235 {
236 return old->sect == new_section;
237 }
238
239 hashval_t
240 object_block_hasher::hash (object_block *old)
241 {
242 return hash_section (old->sect);
243 }
244
245 /* Return a new unnamed section with the given fields. */
246
247 section *
248 get_unnamed_section (unsigned int flags, void (*callback) (const void *),
249 const void *data)
250 {
251 section *sect;
252
253 sect = ggc_alloc<section> ();
254 sect->unnamed.common.flags = flags | SECTION_UNNAMED;
255 sect->unnamed.callback = callback;
256 sect->unnamed.data = data;
257 sect->unnamed.next = unnamed_sections;
258
259 unnamed_sections = sect;
260 return sect;
261 }
262
263 /* Return a SECTION_NOSWITCH section with the given fields. */
264
265 static section *
266 get_noswitch_section (unsigned int flags, noswitch_section_callback callback)
267 {
268 section *sect;
269
270 sect = ggc_alloc<section> ();
271 sect->noswitch.common.flags = flags | SECTION_NOSWITCH;
272 sect->noswitch.callback = callback;
273
274 return sect;
275 }
276
277 /* Return the named section structure associated with NAME. Create
278 a new section with the given fields if no such structure exists. */
279
280 section *
281 get_section (const char *name, unsigned int flags, tree decl)
282 {
283 section *sect, **slot;
284
285 slot = section_htab->find_slot_with_hash (name, htab_hash_string (name),
286 INSERT);
287 flags |= SECTION_NAMED;
288 if (*slot == NULL)
289 {
290 sect = ggc_alloc<section> ();
291 sect->named.common.flags = flags;
292 sect->named.name = ggc_strdup (name);
293 sect->named.decl = decl;
294 *slot = sect;
295 }
296 else
297 {
298 sect = *slot;
299 /* It is fine if one of the sections has SECTION_NOTYPE as long as
300 the other has none of the contrary flags (see the logic at the end
301 of default_section_type_flags, below). */
302 if (((sect->common.flags ^ flags) & SECTION_NOTYPE)
303 && !((sect->common.flags | flags)
304 & (SECTION_CODE | SECTION_BSS | SECTION_TLS | SECTION_ENTSIZE
305 | (HAVE_COMDAT_GROUP ? SECTION_LINKONCE : 0))))
306 {
307 sect->common.flags |= SECTION_NOTYPE;
308 flags |= SECTION_NOTYPE;
309 }
310 if ((sect->common.flags & ~SECTION_DECLARED) != flags
311 && ((sect->common.flags | flags) & SECTION_OVERRIDE) == 0)
312 {
313 /* It is fine if one of the section flags is
314 SECTION_WRITE | SECTION_RELRO and the other has none of these
315 flags (i.e. read-only) in named sections and either the
316 section hasn't been declared yet or has been declared as writable.
317 In that case just make sure the resulting flags are
 318 SECTION_WRITE | SECTION_RELRO, i.e. writable only because of
319 relocations. */
320 if (((sect->common.flags ^ flags) & (SECTION_WRITE | SECTION_RELRO))
321 == (SECTION_WRITE | SECTION_RELRO)
322 && (sect->common.flags
323 & ~(SECTION_DECLARED | SECTION_WRITE | SECTION_RELRO))
324 == (flags & ~(SECTION_WRITE | SECTION_RELRO))
325 && ((sect->common.flags & SECTION_DECLARED) == 0
326 || (sect->common.flags & SECTION_WRITE)))
327 {
328 sect->common.flags |= (SECTION_WRITE | SECTION_RELRO);
329 return sect;
330 }
331 /* Sanity check user variables for flag changes. */
332 if (sect->named.decl != NULL
333 && DECL_P (sect->named.decl)
334 && decl != sect->named.decl)
335 {
336 if (decl != NULL && DECL_P (decl))
337 error ("%+qD causes a section type conflict with %qD",
338 decl, sect->named.decl);
339 else
340 error ("section type conflict with %qD", sect->named.decl);
341 inform (DECL_SOURCE_LOCATION (sect->named.decl),
342 "%qD was declared here", sect->named.decl);
343 }
344 else if (decl != NULL && DECL_P (decl))
345 error ("%+qD causes a section type conflict", decl);
346 else
347 error ("section type conflict");
348 /* Make sure we don't error about one section multiple times. */
349 sect->common.flags |= SECTION_OVERRIDE;
350 }
351 }
352 return sect;
353 }
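
/* Editorial example (not part of the original source): a sketch of how
   get_section behaves, assuming a hypothetical section name ".mydata"
   and two decls DECL1 and DECL2.

     section *s1 = get_section (".mydata", SECTION_WRITE, decl1);
     section *s2 = get_section (".mydata", SECTION_WRITE, decl2);
     // s1 == s2: the entry created by the first call is reused.

   Requesting the same name with incompatible flags, e.g.

     get_section (".mydata", SECTION_WRITE | SECTION_CODE, decl2);

   reports a "section type conflict" against DECL1 and sets
   SECTION_OVERRIDE so the error is not repeated for that section.  */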
354
355 /* Return true if the current compilation mode benefits from having
356 objects grouped into blocks. */
357
358 static bool
359 use_object_blocks_p (void)
360 {
361 return flag_section_anchors;
362 }
363
364 /* Return the object_block structure for section SECT. Create a new
365 structure if we haven't created one already. Return null if SECT
 366 itself is null. Also return null for mergeable sections, since
367 section anchors can't be used in mergeable sections anyway,
368 because the linker might move objects around, and using the
369 object blocks infrastructure in that case is both a waste and a
370 maintenance burden. */
371
372 static struct object_block *
373 get_block_for_section (section *sect)
374 {
375 struct object_block *block;
376
377 if (sect == NULL)
378 return NULL;
379
380 if (sect->common.flags & SECTION_MERGE)
381 return NULL;
382
383 object_block **slot
384 = object_block_htab->find_slot_with_hash (sect, hash_section (sect),
385 INSERT);
386 block = *slot;
387 if (block == NULL)
388 {
389 block = ggc_cleared_alloc<object_block> ();
390 block->sect = sect;
391 *slot = block;
392 }
393 return block;
394 }
395
396 /* Create a symbol with label LABEL and place it at byte offset
397 OFFSET in BLOCK. OFFSET can be negative if the symbol's offset
398 is not yet known. LABEL must be a garbage-collected string. */
399
400 static rtx
401 create_block_symbol (const char *label, struct object_block *block,
402 HOST_WIDE_INT offset)
403 {
404 rtx symbol;
405 unsigned int size;
406
407 /* Create the extended SYMBOL_REF. */
408 size = RTX_HDR_SIZE + sizeof (struct block_symbol);
409 symbol = (rtx) ggc_internal_alloc (size);
410
411 /* Initialize the normal SYMBOL_REF fields. */
412 memset (symbol, 0, size);
413 PUT_CODE (symbol, SYMBOL_REF);
414 PUT_MODE (symbol, Pmode);
415 XSTR (symbol, 0) = label;
416 SYMBOL_REF_FLAGS (symbol) = SYMBOL_FLAG_HAS_BLOCK_INFO;
417
418 /* Initialize the block_symbol stuff. */
419 SYMBOL_REF_BLOCK (symbol) = block;
420 SYMBOL_REF_BLOCK_OFFSET (symbol) = offset;
421
422 return symbol;
423 }
424
425 /* Return a section with a particular name and with whatever SECTION_*
426 flags section_type_flags deems appropriate. The name of the section
427 is taken from NAME if nonnull, otherwise it is taken from DECL's
428 DECL_SECTION_NAME. DECL is the decl associated with the section
429 (see the section comment for details) and RELOC is as for
430 section_type_flags. */
431
432 section *
433 get_named_section (tree decl, const char *name, int reloc)
434 {
435 unsigned int flags;
436
437 if (name == NULL)
438 {
439 gcc_assert (decl && DECL_P (decl) && DECL_SECTION_NAME (decl));
440 name = DECL_SECTION_NAME (decl);
441 }
442
443 flags = targetm.section_type_flags (decl, name, reloc);
444 return get_section (name, flags, decl);
445 }
446
447 /* Worker for resolve_unique_section. */
448
449 static bool
450 set_implicit_section (struct symtab_node *n, void *data ATTRIBUTE_UNUSED)
451 {
452 n->implicit_section = true;
453 return false;
454 }
455
456 /* If required, set DECL_SECTION_NAME to a unique name. */
457
458 void
459 resolve_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED,
460 int flag_function_or_data_sections)
461 {
462 if (DECL_SECTION_NAME (decl) == NULL
463 && targetm_common.have_named_sections
464 && (flag_function_or_data_sections
465 || DECL_COMDAT_GROUP (decl)))
466 {
467 targetm.asm_out.unique_section (decl, reloc);
468 if (DECL_SECTION_NAME (decl))
469 symtab_node::get (decl)->call_for_symbol_and_aliases
470 (set_implicit_section, NULL, true);
471 }
472 }
473
474 #ifdef BSS_SECTION_ASM_OP
475
476 #ifdef ASM_OUTPUT_ALIGNED_BSS
477
478 /* Utility function for targets to use in implementing
479 ASM_OUTPUT_ALIGNED_BSS.
480 ??? It is believed that this function will work in most cases so such
481 support is localized here. */
482
483 static void
484 asm_output_aligned_bss (FILE *file, tree decl ATTRIBUTE_UNUSED,
485 const char *name, unsigned HOST_WIDE_INT size,
486 int align)
487 {
488 switch_to_section (bss_section);
489 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
490 #ifdef ASM_DECLARE_OBJECT_NAME
491 last_assemble_variable_decl = decl;
492 ASM_DECLARE_OBJECT_NAME (file, name, decl);
493 #else
 494 /* The standard thing is just to output a label for the object. */
495 ASM_OUTPUT_LABEL (file, name);
496 #endif /* ASM_DECLARE_OBJECT_NAME */
497 ASM_OUTPUT_SKIP (file, size ? size : 1);
498 }
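
/* Editorial sketch (not part of the original source): on a typical ELF
   target the sequence above switches to .bss, emits an alignment
   directive for the requested alignment, prints the object's label (or
   lets ASM_DECLARE_OBJECT_NAME do so), and reserves SIZE bytes (at
   least one) with a skip/space directive.  */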
499
500 #endif
501
502 #endif /* BSS_SECTION_ASM_OP */
503
504 #ifndef USE_SELECT_SECTION_FOR_FUNCTIONS
505 /* Return the hot section for function DECL. Return text_section for
506 null DECLs. */
507
508 static section *
509 hot_function_section (tree decl)
510 {
511 if (decl != NULL_TREE
512 && DECL_SECTION_NAME (decl) != NULL
513 && targetm_common.have_named_sections)
514 return get_named_section (decl, NULL, 0);
515 else
516 return text_section;
517 }
518 #endif
519
520 /* Return section for TEXT_SECTION_NAME if DECL or DECL_SECTION_NAME (DECL)
521 is NULL.
522
 523 When DECL_SECTION_NAME is non-NULL, the section is implicit and
 524 NAMED_SECTION_SUFFIX is non-NULL, produce a section whose name is
 525 DECL_SECTION_NAME concatenated with NAMED_SECTION_SUFFIX.
526 Otherwise produce "TEXT_SECTION_NAME.IMPLICIT_NAME". */
527
528 section *
529 get_named_text_section (tree decl,
530 const char *text_section_name,
531 const char *named_section_suffix)
532 {
533 if (decl && DECL_SECTION_NAME (decl))
534 {
535 if (named_section_suffix)
536 {
537 const char *dsn = DECL_SECTION_NAME (decl);
538 const char *stripped_name;
539 char *name, *buffer;
540
541 name = (char *) alloca (strlen (dsn) + 1);
542 memcpy (name, dsn,
543 strlen (dsn) + 1);
544
545 stripped_name = targetm.strip_name_encoding (name);
546
547 buffer = ACONCAT ((stripped_name, named_section_suffix, NULL));
548 return get_named_section (decl, buffer, 0);
549 }
550 else if (symtab_node::get (decl)->implicit_section)
551 {
552 const char *name;
553
554 /* Do not try to split gnu_linkonce functions. This gets somewhat
 555 slippery. */
556 if (DECL_COMDAT_GROUP (decl) && !HAVE_COMDAT_GROUP)
557 return NULL;
558 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
559 name = targetm.strip_name_encoding (name);
560 return get_named_section (decl, ACONCAT ((text_section_name, ".",
561 name, NULL)), 0);
562 }
563 else
564 return NULL;
565 }
566 return get_named_section (decl, text_section_name, 0);
567 }
568
569 /* Choose named function section based on its frequency. */
570
571 section *
572 default_function_section (tree decl, enum node_frequency freq,
573 bool startup, bool exit)
574 {
575 #if defined HAVE_LD_EH_GC_SECTIONS && defined HAVE_LD_EH_GC_SECTIONS_BUG
 576 /* Old GNU linkers have buggy --gc-sections support, which sometimes
577 results in .gcc_except_table* sections being garbage collected. */
578 if (decl
579 && symtab_node::get (decl)->implicit_section)
580 return NULL;
581 #endif
582
583 if (!flag_reorder_functions
584 || !targetm_common.have_named_sections)
585 return NULL;
 586 /* Startup code should go to the startup subsection unless it is
 587 unlikely to be executed (this happens especially with function splitting,
 588 where we can split away unnecessary parts of static constructors). */
589 if (startup && freq != NODE_FREQUENCY_UNLIKELY_EXECUTED)
590 {
 591 /* If we have a profile and the LTO phase is executed, we do not need
 592 these ELF sections. */
593 if (!in_lto_p || !flag_profile_values)
594 return get_named_text_section (decl, ".text.startup", NULL);
595 else
596 return NULL;
597 }
598
599 /* Similarly for exit. */
600 if (exit && freq != NODE_FREQUENCY_UNLIKELY_EXECUTED)
601 return get_named_text_section (decl, ".text.exit", NULL);
602
603 /* Group cold functions together, similarly for hot code. */
604 switch (freq)
605 {
606 case NODE_FREQUENCY_UNLIKELY_EXECUTED:
607 return get_named_text_section (decl, ".text.unlikely", NULL);
608 case NODE_FREQUENCY_HOT:
 609 /* If we have a profile and the LTO phase is executed, we do not need
 610 these ELF sections. */
611 if (!in_lto_p || !flag_profile_values)
612 return get_named_text_section (decl, ".text.hot", NULL);
613 /* FALLTHRU */
614 default:
615 return NULL;
616 }
617 }
618
619 /* Return the section for function DECL.
620
621 If DECL is NULL_TREE, return the text section. We can be passed
622 NULL_TREE under some circumstances by dbxout.c at least.
623
624 If FORCE_COLD is true, return cold function section ignoring
625 the frequency info of cgraph_node. */
626
627 static section *
628 function_section_1 (tree decl, bool force_cold)
629 {
630 section *section = NULL;
631 enum node_frequency freq = NODE_FREQUENCY_NORMAL;
632 bool startup = false, exit = false;
633
634 if (decl)
635 {
636 struct cgraph_node *node = cgraph_node::get (decl);
637
638 if (node)
639 {
640 freq = node->frequency;
641 startup = node->only_called_at_startup;
642 exit = node->only_called_at_exit;
643 }
644 }
645 if (force_cold)
646 freq = NODE_FREQUENCY_UNLIKELY_EXECUTED;
647
648 #ifdef USE_SELECT_SECTION_FOR_FUNCTIONS
649 if (decl != NULL_TREE
650 && DECL_SECTION_NAME (decl) != NULL)
651 {
652 if (targetm.asm_out.function_section)
653 section = targetm.asm_out.function_section (decl, freq,
654 startup, exit);
655 if (section)
656 return section;
657 return get_named_section (decl, NULL, 0);
658 }
659 else
660 return targetm.asm_out.select_section
661 (decl, freq == NODE_FREQUENCY_UNLIKELY_EXECUTED,
662 symtab_node::get (decl)->definition_alignment ());
663 #else
664 if (targetm.asm_out.function_section)
665 section = targetm.asm_out.function_section (decl, freq, startup, exit);
666 if (section)
667 return section;
668 return hot_function_section (decl);
669 #endif
670 }
671
672 /* Return the section for function DECL.
673
674 If DECL is NULL_TREE, return the text section. We can be passed
675 NULL_TREE under some circumstances by dbxout.c at least. */
676
677 section *
678 function_section (tree decl)
679 {
680 /* Handle cases where function splitting code decides
681 to put function entry point into unlikely executed section
682 despite the fact that the function itself is not cold
683 (i.e. it is called rarely but contains a hot loop that is
 684 better placed in the hot subsection for code locality). */
685 return function_section_1 (decl,
686 first_function_block_is_cold);
687 }
688
 689 /* Return the section for the current function, taking IN_COLD_SECTION_P
690 into account. */
691
692 section *
693 current_function_section (void)
694 {
695 return function_section_1 (current_function_decl, in_cold_section_p);
696 }
697
698 /* Tell assembler to switch to unlikely-to-be-executed text section. */
699
700 section *
701 unlikely_text_section (void)
702 {
703 return function_section_1 (current_function_decl, true);
704 }
705
706 /* When called within a function context, return true if the function
707 has been assigned a cold text section and if SECT is that section.
708 When called outside a function context, return true if SECT is the
709 default cold section. */
710
711 bool
712 unlikely_text_section_p (section *sect)
713 {
714 return sect == function_section_1 (current_function_decl, true);
715 }
716
 717 /* Switch to the other function partition (if inside the hot section,
 718 switch into the cold section, otherwise into the hot section). */
719
720 void
721 switch_to_other_text_partition (void)
722 {
723 in_cold_section_p = !in_cold_section_p;
724 switch_to_section (current_function_section ());
725 }
726
727 /* Return the read-only data section associated with function DECL. */
728
729 section *
730 default_function_rodata_section (tree decl)
731 {
732 if (decl != NULL_TREE && DECL_SECTION_NAME (decl))
733 {
734 const char *name = DECL_SECTION_NAME (decl);
735
736 if (DECL_COMDAT_GROUP (decl) && HAVE_COMDAT_GROUP)
737 {
738 const char *dot;
739 size_t len;
740 char* rname;
741
742 dot = strchr (name + 1, '.');
743 if (!dot)
744 dot = name;
745 len = strlen (dot) + 8;
746 rname = (char *) alloca (len);
747
748 strcpy (rname, ".rodata");
749 strcat (rname, dot);
750 return get_section (rname, SECTION_LINKONCE, decl);
751 }
752 /* For .gnu.linkonce.t.foo we want to use .gnu.linkonce.r.foo. */
753 else if (DECL_COMDAT_GROUP (decl)
754 && strncmp (name, ".gnu.linkonce.t.", 16) == 0)
755 {
756 size_t len = strlen (name) + 1;
757 char *rname = (char *) alloca (len);
758
759 memcpy (rname, name, len);
760 rname[14] = 'r';
761 return get_section (rname, SECTION_LINKONCE, decl);
762 }
763 /* For .text.foo we want to use .rodata.foo. */
764 else if (flag_function_sections && flag_data_sections
765 && strncmp (name, ".text.", 6) == 0)
766 {
767 size_t len = strlen (name) + 1;
768 char *rname = (char *) alloca (len + 2);
769
770 memcpy (rname, ".rodata", 7);
771 memcpy (rname + 7, name + 5, len - 5);
772 return get_section (rname, 0, decl);
773 }
774 }
775
776 return readonly_data_section;
777 }
778
779 /* Return the read-only data section associated with function DECL
 780 for targets where that section should always be the single
781 readonly data section. */
782
783 section *
784 default_no_function_rodata_section (tree decl ATTRIBUTE_UNUSED)
785 {
786 return readonly_data_section;
787 }
788
789 /* A subroutine of mergeable_string_section and mergeable_constant_section. */
790
791 static const char *
792 function_mergeable_rodata_prefix (void)
793 {
794 section *s = targetm.asm_out.function_rodata_section (current_function_decl);
795 if (SECTION_STYLE (s) == SECTION_NAMED)
796 return s->named.name;
797 else
798 return targetm.asm_out.mergeable_rodata_prefix;
799 }
800
801 /* Return the section to use for string merging. */
802
803 static section *
804 mergeable_string_section (tree decl ATTRIBUTE_UNUSED,
805 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED,
806 unsigned int flags ATTRIBUTE_UNUSED)
807 {
808 HOST_WIDE_INT len;
809
810 if (HAVE_GAS_SHF_MERGE && flag_merge_constants
811 && TREE_CODE (decl) == STRING_CST
812 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
813 && align <= 256
814 && (len = int_size_in_bytes (TREE_TYPE (decl))) > 0
815 && TREE_STRING_LENGTH (decl) == len)
816 {
817 scalar_int_mode mode;
818 unsigned int modesize;
819 const char *str;
820 HOST_WIDE_INT i;
821 int j, unit;
822 const char *prefix = function_mergeable_rodata_prefix ();
823 char *name = (char *) alloca (strlen (prefix) + 30);
824
825 mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (TREE_TYPE (decl)));
826 modesize = GET_MODE_BITSIZE (mode);
827 if (modesize >= 8 && modesize <= 256
828 && (modesize & (modesize - 1)) == 0)
829 {
830 if (align < modesize)
831 align = modesize;
832
833 if (!HAVE_LD_ALIGNED_SHF_MERGE && align > 8)
834 return readonly_data_section;
835
836 str = TREE_STRING_POINTER (decl);
837 unit = GET_MODE_SIZE (mode);
838
839 /* Check for embedded NUL characters. */
840 for (i = 0; i < len; i += unit)
841 {
842 for (j = 0; j < unit; j++)
843 if (str[i + j] != '\0')
844 break;
845 if (j == unit)
846 break;
847 }
848 if (i == len - unit || (unit == 1 && i == len))
849 {
850 sprintf (name, "%s.str%d.%d", prefix,
851 modesize / 8, (int) (align / 8));
852 flags |= (modesize / 8) | SECTION_MERGE | SECTION_STRINGS;
853 return get_section (name, flags, NULL);
854 }
855 }
856 }
857
858 return readonly_data_section;
859 }
860
861 /* Return the section to use for constant merging. */
862
863 section *
864 mergeable_constant_section (machine_mode mode ATTRIBUTE_UNUSED,
865 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED,
866 unsigned int flags ATTRIBUTE_UNUSED)
867 {
868 if (HAVE_GAS_SHF_MERGE && flag_merge_constants
869 && mode != VOIDmode
870 && mode != BLKmode
871 && known_le (GET_MODE_BITSIZE (mode), align)
872 && align >= 8
873 && align <= 256
874 && (align & (align - 1)) == 0
875 && (HAVE_LD_ALIGNED_SHF_MERGE ? 1 : align == 8))
876 {
877 const char *prefix = function_mergeable_rodata_prefix ();
878 char *name = (char *) alloca (strlen (prefix) + 30);
879
880 sprintf (name, "%s.cst%d", prefix, (int) (align / 8));
881 flags |= (align / 8) | SECTION_MERGE;
882 return get_section (name, flags, NULL);
883 }
884 return readonly_data_section;
885 }
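
/* Editorial example (not part of the original source): assuming gas
   SHF_MERGE support, -fmerge-constants, and a linker that handles
   aligned SHF_MERGE sections (HAVE_LD_ALIGNED_SHF_MERGE), a DImode
   constant with 64-bit alignment and a ".rodata" prefix is placed in
   ".rodata.cst8", with SECTION_MERGE set and the entity size 8 encoded
   in the low SECTION_ENTSIZE bits of the flags.  */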
886 \f
887 /* Given NAME, a putative register name, discard any customary prefixes. */
888
889 static const char *
890 strip_reg_name (const char *name)
891 {
892 #ifdef REGISTER_PREFIX
893 if (!strncmp (name, REGISTER_PREFIX, strlen (REGISTER_PREFIX)))
894 name += strlen (REGISTER_PREFIX);
895 #endif
896 if (name[0] == '%' || name[0] == '#')
897 name++;
898 return name;
899 }
900 \f
901 /* The user has asked for a DECL to have a particular name. Set (or
902 change) it in such a way that we don't prefix an underscore to
903 it. */
904 void
905 set_user_assembler_name (tree decl, const char *name)
906 {
907 char *starred = (char *) alloca (strlen (name) + 2);
908 starred[0] = '*';
909 strcpy (starred + 1, name);
910 symtab->change_decl_assembler_name (decl, get_identifier (starred));
911 SET_DECL_RTL (decl, NULL_RTX);
912 }
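
/* Editorial note (not part of the original source): for example,
   set_user_assembler_name (decl, "my_symbol") records "*my_symbol";
   the leading '*' tells later output code to use the name verbatim,
   without the target's user-label prefix.  */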
913 \f
914 /* Decode an `asm' spec for a declaration as a register name.
915 Return the register number, or -1 if nothing specified,
916 or -2 if the ASMSPEC is not `cc' or `memory' and is not recognized,
917 or -3 if ASMSPEC is `cc' and is not recognized,
918 or -4 if ASMSPEC is `memory' and is not recognized.
919 Accept an exact spelling or a decimal number.
920 Prefixes such as % are optional. */
921
922 int
923 decode_reg_name_and_count (const char *asmspec, int *pnregs)
924 {
925 /* Presume just one register is clobbered. */
926 *pnregs = 1;
927
928 if (asmspec != 0)
929 {
930 int i;
931
932 /* Get rid of confusing prefixes. */
933 asmspec = strip_reg_name (asmspec);
934
935 /* Allow a decimal number as a "register name". */
936 for (i = strlen (asmspec) - 1; i >= 0; i--)
937 if (! ISDIGIT (asmspec[i]))
938 break;
939 if (asmspec[0] != 0 && i < 0)
940 {
941 i = atoi (asmspec);
942 if (i < FIRST_PSEUDO_REGISTER && i >= 0 && reg_names[i][0])
943 return i;
944 else
945 return -2;
946 }
947
948 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
949 if (reg_names[i][0]
950 && ! strcmp (asmspec, strip_reg_name (reg_names[i])))
951 return i;
952
953 #ifdef OVERLAPPING_REGISTER_NAMES
954 {
955 static const struct
956 {
957 const char *const name;
958 const int number;
959 const int nregs;
960 } table[] = OVERLAPPING_REGISTER_NAMES;
961
962 for (i = 0; i < (int) ARRAY_SIZE (table); i++)
963 if (table[i].name[0]
964 && ! strcmp (asmspec, table[i].name))
965 {
966 *pnregs = table[i].nregs;
967 return table[i].number;
968 }
969 }
970 #endif /* OVERLAPPING_REGISTER_NAMES */
971
972 #ifdef ADDITIONAL_REGISTER_NAMES
973 {
974 static const struct { const char *const name; const int number; } table[]
975 = ADDITIONAL_REGISTER_NAMES;
976
977 for (i = 0; i < (int) ARRAY_SIZE (table); i++)
978 if (table[i].name[0]
979 && ! strcmp (asmspec, table[i].name)
980 && reg_names[table[i].number][0])
981 return table[i].number;
982 }
983 #endif /* ADDITIONAL_REGISTER_NAMES */
984
985 if (!strcmp (asmspec, "memory"))
986 return -4;
987
988 if (!strcmp (asmspec, "cc"))
989 return -3;
990
991 return -2;
992 }
993
994 return -1;
995 }
996
997 int
998 decode_reg_name (const char *name)
999 {
1000 int count;
1001 return decode_reg_name_and_count (name, &count);
1002 }
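
/* Editorial example (not part of the original source), following the
   return convention documented above and assuming "foo42" is not a
   register name on the target:

     decode_reg_name (NULL)      returns -1  (nothing specified)
     decode_reg_name ("memory")  returns -4  (unless "memory" names a register)
     decode_reg_name ("cc")      returns -3  (unless "cc" names a register)
     decode_reg_name ("foo42")   returns -2  (not recognized)

   A recognized register name or a decimal number yields the hard
   register number; decode_reg_name_and_count also sets *PNREGS, which
   is 1 unless the target's OVERLAPPING_REGISTER_NAMES says the name
   covers several consecutive registers.  */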
1003
1004 \f
1005 /* Return true if DECL's initializer is suitable for a BSS section. */
1006
1007 bool
1008 bss_initializer_p (const_tree decl, bool named)
1009 {
 1010 /* Do not put non-common constants into the .bss section; they belong in
1011 a readonly section, except when NAMED is true. */
1012 return ((!TREE_READONLY (decl) || DECL_COMMON (decl) || named)
1013 && (DECL_INITIAL (decl) == NULL
1014 /* In LTO we have no errors in program; error_mark_node is used
1015 to mark offlined constructors. */
1016 || (DECL_INITIAL (decl) == error_mark_node
1017 && !in_lto_p)
1018 || (flag_zero_initialized_in_bss
1019 && initializer_zerop (DECL_INITIAL (decl)))));
1020 }
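
/* Editorial example (not part of the original source): with the default
   -fzero-initialized-in-bss, a file-scope "int x;" or "int x = 0;"
   satisfies bss_initializer_p; "int x = 1;" does not (nonzero
   initializer), and "const int x = 0;" does not either unless the decl
   is common or NAMED is true.  */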
1021
1022 /* Compute the alignment of variable specified by DECL.
1023 DONT_OUTPUT_DATA is from assemble_variable. */
1024
1025 void
1026 align_variable (tree decl, bool dont_output_data)
1027 {
1028 unsigned int align = DECL_ALIGN (decl);
1029
 1030 /* In the case of initializing an array whose length isn't specified,
1031 where we have not yet been able to do the layout,
1032 figure out the proper alignment now. */
1033 if (dont_output_data && DECL_SIZE (decl) == 0
1034 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1035 align = MAX (align, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
1036
1037 /* Some object file formats have a maximum alignment which they support.
1038 In particular, a.out format supports a maximum alignment of 4. */
1039 if (align > MAX_OFILE_ALIGNMENT)
1040 {
1041 error ("alignment of %q+D is greater than maximum object "
1042 "file alignment %d", decl,
1043 MAX_OFILE_ALIGNMENT/BITS_PER_UNIT);
1044 align = MAX_OFILE_ALIGNMENT;
1045 }
1046
1047 if (! DECL_USER_ALIGN (decl))
1048 {
1049 #ifdef DATA_ABI_ALIGNMENT
1050 unsigned int data_abi_align
1051 = DATA_ABI_ALIGNMENT (TREE_TYPE (decl), align);
1052 /* For backwards compatibility, don't assume the ABI alignment for
1053 TLS variables. */
1054 if (! DECL_THREAD_LOCAL_P (decl) || data_abi_align <= BITS_PER_WORD)
1055 align = data_abi_align;
1056 #endif
1057
 1058 /* On some machines, it is sometimes good to increase alignment.
 1059 But as DECL_ALIGN is used both for actually emitting the variable
 1060 and as the alignment guaranteed to code accessing the variable, we
 1061 can only increase the alignment as a performance optimization when
 1062 the references to it must bind to the current definition. */
1063 if (decl_binds_to_current_def_p (decl)
1064 && !DECL_VIRTUAL_P (decl))
1065 {
1066 #ifdef DATA_ALIGNMENT
1067 unsigned int data_align = DATA_ALIGNMENT (TREE_TYPE (decl), align);
1068 /* Don't increase alignment too much for TLS variables - TLS space
1069 is too precious. */
1070 if (! DECL_THREAD_LOCAL_P (decl) || data_align <= BITS_PER_WORD)
1071 align = data_align;
1072 #endif
1073 if (DECL_INITIAL (decl) != 0
1074 /* In LTO we have no errors in program; error_mark_node is used
1075 to mark offlined constructors. */
1076 && (in_lto_p || DECL_INITIAL (decl) != error_mark_node))
1077 {
1078 unsigned int const_align
1079 = targetm.constant_alignment (DECL_INITIAL (decl), align);
1080 /* Don't increase alignment too much for TLS variables - TLS
1081 space is too precious. */
1082 if (! DECL_THREAD_LOCAL_P (decl) || const_align <= BITS_PER_WORD)
1083 align = const_align;
1084 }
1085 }
1086 }
1087
1088 /* Reset the alignment in case we have made it tighter, so we can benefit
1089 from it in get_pointer_alignment. */
1090 SET_DECL_ALIGN (decl, align);
1091 }
1092
1093 /* Return DECL_ALIGN (decl), possibly increased for optimization purposes
1094 beyond what align_variable returned. */
1095
1096 static unsigned int
1097 get_variable_align (tree decl)
1098 {
1099 unsigned int align = DECL_ALIGN (decl);
1100
 1101 /* For user-aligned vars or static vars, align_variable already did
1102 everything. */
1103 if (DECL_USER_ALIGN (decl) || !TREE_PUBLIC (decl))
1104 return align;
1105
1106 #ifdef DATA_ABI_ALIGNMENT
1107 if (DECL_THREAD_LOCAL_P (decl))
1108 align = DATA_ABI_ALIGNMENT (TREE_TYPE (decl), align);
1109 #endif
1110
1111 /* For decls that bind to the current definition, align_variable
 1112 also did everything, except for not assuming the ABI-required alignment
1113 of TLS variables. For other vars, increase the alignment here
1114 as an optimization. */
1115 if (!decl_binds_to_current_def_p (decl))
1116 {
1117 /* On some machines, it is good to increase alignment sometimes. */
1118 #ifdef DATA_ALIGNMENT
1119 unsigned int data_align = DATA_ALIGNMENT (TREE_TYPE (decl), align);
1120 /* Don't increase alignment too much for TLS variables - TLS space
1121 is too precious. */
1122 if (! DECL_THREAD_LOCAL_P (decl) || data_align <= BITS_PER_WORD)
1123 align = data_align;
1124 #endif
1125 if (DECL_INITIAL (decl) != 0
1126 /* In LTO we have no errors in program; error_mark_node is used
1127 to mark offlined constructors. */
1128 && (in_lto_p || DECL_INITIAL (decl) != error_mark_node))
1129 {
1130 unsigned int const_align
1131 = targetm.constant_alignment (DECL_INITIAL (decl), align);
1132 /* Don't increase alignment too much for TLS variables - TLS space
1133 is too precious. */
1134 if (! DECL_THREAD_LOCAL_P (decl) || const_align <= BITS_PER_WORD)
1135 align = const_align;
1136 }
1137 }
1138
1139 return align;
1140 }
1141
1142 /* Return the section into which the given VAR_DECL or CONST_DECL
1143 should be placed. PREFER_NOSWITCH_P is true if a noswitch
1144 section should be used wherever possible. */
1145
1146 section *
1147 get_variable_section (tree decl, bool prefer_noswitch_p)
1148 {
1149 addr_space_t as = ADDR_SPACE_GENERIC;
1150 int reloc;
1151 varpool_node *vnode = varpool_node::get (decl);
1152 if (vnode)
1153 {
1154 vnode = vnode->ultimate_alias_target ();
1155 decl = vnode->decl;
1156 }
1157
1158 if (TREE_TYPE (decl) != error_mark_node)
1159 as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
1160
1161 /* We need the constructor to figure out reloc flag. */
1162 if (vnode)
1163 vnode->get_constructor ();
1164
1165 if (DECL_COMMON (decl))
1166 {
1167 /* If the decl has been given an explicit section name, or it resides
1168 in a non-generic address space, then it isn't common, and shouldn't
1169 be handled as such. */
1170 gcc_assert (DECL_SECTION_NAME (decl) == NULL
1171 && ADDR_SPACE_GENERIC_P (as));
1172 if (DECL_THREAD_LOCAL_P (decl))
1173 return tls_comm_section;
1174 else if (TREE_PUBLIC (decl) && bss_initializer_p (decl))
1175 return comm_section;
1176 }
1177
1178 if (DECL_INITIAL (decl) == error_mark_node)
1179 reloc = contains_pointers_p (TREE_TYPE (decl)) ? 3 : 0;
1180 else if (DECL_INITIAL (decl))
1181 reloc = compute_reloc_for_constant (DECL_INITIAL (decl));
1182 else
1183 reloc = 0;
1184
1185 resolve_unique_section (decl, reloc, flag_data_sections);
1186 if (IN_NAMED_SECTION (decl))
1187 {
1188 section *sect = get_named_section (decl, NULL, reloc);
1189
1190 if ((sect->common.flags & SECTION_BSS)
1191 && !bss_initializer_p (decl, true))
1192 {
1193 error_at (DECL_SOURCE_LOCATION (decl),
1194 "only zero initializers are allowed in section %qs",
1195 sect->named.name);
1196 DECL_INITIAL (decl) = error_mark_node;
1197 }
1198 return sect;
1199 }
1200
1201 if (ADDR_SPACE_GENERIC_P (as)
1202 && !DECL_THREAD_LOCAL_P (decl)
1203 && !(prefer_noswitch_p && targetm.have_switchable_bss_sections)
1204 && bss_initializer_p (decl))
1205 {
1206 if (!TREE_PUBLIC (decl)
1207 && !((flag_sanitize & SANITIZE_ADDRESS)
1208 && asan_protect_global (decl)))
1209 return lcomm_section;
1210 if (bss_noswitch_section)
1211 return bss_noswitch_section;
1212 }
1213
1214 return targetm.asm_out.select_section (decl, reloc,
1215 get_variable_align (decl));
1216 }
1217
1218 /* Return the block into which object_block DECL should be placed. */
1219
1220 static struct object_block *
1221 get_block_for_decl (tree decl)
1222 {
1223 section *sect;
1224
1225 if (VAR_P (decl))
1226 {
1227 /* The object must be defined in this translation unit. */
1228 if (DECL_EXTERNAL (decl))
1229 return NULL;
1230
1231 /* There's no point using object blocks for something that is
1232 isolated by definition. */
1233 if (DECL_COMDAT_GROUP (decl))
1234 return NULL;
1235 }
1236
1237 /* We can only calculate block offsets if the decl has a known
1238 constant size. */
1239 if (DECL_SIZE_UNIT (decl) == NULL)
1240 return NULL;
1241 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (decl)))
1242 return NULL;
1243
1244 /* Find out which section should contain DECL. We cannot put it into
1245 an object block if it requires a standalone definition. */
1246 if (VAR_P (decl))
1247 align_variable (decl, 0);
1248 sect = get_variable_section (decl, true);
1249 if (SECTION_STYLE (sect) == SECTION_NOSWITCH)
1250 return NULL;
1251
1252 return get_block_for_section (sect);
1253 }
1254
1255 /* Make sure block symbol SYMBOL is in block BLOCK. */
1256
1257 static void
1258 change_symbol_block (rtx symbol, struct object_block *block)
1259 {
1260 if (block != SYMBOL_REF_BLOCK (symbol))
1261 {
1262 gcc_assert (SYMBOL_REF_BLOCK_OFFSET (symbol) < 0);
1263 SYMBOL_REF_BLOCK (symbol) = block;
1264 }
1265 }
1266
1267 /* Return true if it is possible to put DECL in an object_block. */
1268
1269 static bool
1270 use_blocks_for_decl_p (tree decl)
1271 {
1272 struct symtab_node *snode;
1273
1274 /* Only data DECLs can be placed into object blocks. */
1275 if (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
1276 return false;
1277
1278 /* DECL_INITIAL (decl) set to decl is a hack used for some decls that
1279 are never used from code directly and we never want object block handling
1280 for those. */
1281 if (DECL_INITIAL (decl) == decl)
1282 return false;
1283
1284 /* If this decl is an alias, then we don't want to emit a
1285 definition. */
1286 if (VAR_P (decl)
1287 && (snode = symtab_node::get (decl)) != NULL
1288 && snode->alias)
1289 return false;
1290
1291 return targetm.use_blocks_for_decl_p (decl);
1292 }
1293
1294 /* Follow the IDENTIFIER_TRANSPARENT_ALIAS chain starting at *ALIAS
1295 until we find an identifier that is not itself a transparent alias.
1296 Modify the alias passed to it by reference (and all aliases on the
1297 way to the ultimate target), such that they do not have to be
1298 followed again, and return the ultimate target of the alias
1299 chain. */
1300
1301 static inline tree
1302 ultimate_transparent_alias_target (tree *alias)
1303 {
1304 tree target = *alias;
1305
1306 if (IDENTIFIER_TRANSPARENT_ALIAS (target))
1307 {
1308 gcc_assert (TREE_CHAIN (target));
1309 target = ultimate_transparent_alias_target (&TREE_CHAIN (target));
1310 gcc_assert (! IDENTIFIER_TRANSPARENT_ALIAS (target)
1311 && ! TREE_CHAIN (target));
1312 *alias = target;
1313 }
1314
1315 return target;
1316 }
1317
1318 /* Create the DECL_RTL for a VAR_DECL or FUNCTION_DECL. DECL should
1319 have static storage duration. In other words, it should not be an
1320 automatic variable, including PARM_DECLs.
1321
1322 There is, however, one exception: this function handles variables
1323 explicitly placed in a particular register by the user.
1324
1325 This is never called for PARM_DECL nodes. */
1326
1327 void
1328 make_decl_rtl (tree decl)
1329 {
1330 const char *name = 0;
1331 int reg_number;
1332 tree id;
1333 rtx x;
1334
1335 /* Check that we are not being given an automatic variable. */
1336 gcc_assert (TREE_CODE (decl) != PARM_DECL
1337 && TREE_CODE (decl) != RESULT_DECL);
1338
1339 /* A weak alias has TREE_PUBLIC set but not the other bits. */
1340 gcc_assert (!VAR_P (decl)
1341 || TREE_STATIC (decl)
1342 || TREE_PUBLIC (decl)
1343 || DECL_EXTERNAL (decl)
1344 || DECL_REGISTER (decl));
1345
1346 /* And that we were not given a type or a label. */
1347 gcc_assert (TREE_CODE (decl) != TYPE_DECL
1348 && TREE_CODE (decl) != LABEL_DECL);
1349
1350 /* For a duplicate declaration, we can be called twice on the
1351 same DECL node. Don't discard the RTL already made. */
1352 if (DECL_RTL_SET_P (decl))
1353 {
1354 /* If the old RTL had the wrong mode, fix the mode. */
1355 x = DECL_RTL (decl);
1356 if (GET_MODE (x) != DECL_MODE (decl))
1357 SET_DECL_RTL (decl, adjust_address_nv (x, DECL_MODE (decl), 0));
1358
1359 if (TREE_CODE (decl) != FUNCTION_DECL && DECL_REGISTER (decl))
1360 return;
1361
1362 /* ??? Another way to do this would be to maintain a hashed
1363 table of such critters. Instead of adding stuff to a DECL
1364 to give certain attributes to it, we could use an external
1365 hash map from DECL to set of attributes. */
1366
1367 /* Let the target reassign the RTL if it wants.
1368 This is necessary, for example, when one machine specific
1369 decl attribute overrides another. */
1370 targetm.encode_section_info (decl, DECL_RTL (decl), false);
1371
1372 /* If the symbol has a SYMBOL_REF_BLOCK field, update it based
1373 on the new decl information. */
1374 if (MEM_P (x)
1375 && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
1376 && SYMBOL_REF_HAS_BLOCK_INFO_P (XEXP (x, 0)))
1377 change_symbol_block (XEXP (x, 0), get_block_for_decl (decl));
1378
1379 return;
1380 }
1381
1382 /* If this variable belongs to the global constant pool, retrieve the
1383 pre-computed RTL or recompute it in LTO mode. */
1384 if (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
1385 {
1386 SET_DECL_RTL (decl, output_constant_def (DECL_INITIAL (decl), 1));
1387 return;
1388 }
1389
1390 id = DECL_ASSEMBLER_NAME (decl);
1391 name = IDENTIFIER_POINTER (id);
1392
1393 if (name[0] != '*' && TREE_CODE (decl) != FUNCTION_DECL
1394 && DECL_REGISTER (decl))
1395 {
1396 error ("register name not specified for %q+D", decl);
1397 }
1398 else if (TREE_CODE (decl) != FUNCTION_DECL && DECL_REGISTER (decl))
1399 {
1400 const char *asmspec = name+1;
1401 machine_mode mode = DECL_MODE (decl);
1402 reg_number = decode_reg_name (asmspec);
1403 /* First detect errors in declaring global registers. */
1404 if (reg_number == -1)
1405 error ("register name not specified for %q+D", decl);
1406 else if (reg_number < 0)
1407 error ("invalid register name for %q+D", decl);
1408 else if (mode == BLKmode)
1409 error ("data type of %q+D isn%'t suitable for a register",
1410 decl);
1411 else if (!in_hard_reg_set_p (accessible_reg_set, mode, reg_number))
1412 error ("the register specified for %q+D cannot be accessed"
1413 " by the current target", decl);
1414 else if (!in_hard_reg_set_p (operand_reg_set, mode, reg_number))
1415 error ("the register specified for %q+D is not general enough"
1416 " to be used as a register variable", decl);
1417 else if (!targetm.hard_regno_mode_ok (reg_number, mode))
1418 error ("register specified for %q+D isn%'t suitable for data type",
1419 decl);
1420 /* Now handle properly declared static register variables. */
1421 else
1422 {
1423 int nregs;
1424
1425 if (DECL_INITIAL (decl) != 0 && TREE_STATIC (decl))
1426 {
1427 DECL_INITIAL (decl) = 0;
1428 error ("global register variable has initial value");
1429 }
1430 if (TREE_THIS_VOLATILE (decl))
1431 warning (OPT_Wvolatile_register_var,
1432 "optimization may eliminate reads and/or "
1433 "writes to register variables");
1434
 1435 /* If the user specified one of the eliminable registers here,
1436 e.g., FRAME_POINTER_REGNUM, we don't want to get this variable
1437 confused with that register and be eliminated. This usage is
1438 somewhat suspect... */
1439
1440 SET_DECL_RTL (decl, gen_raw_REG (mode, reg_number));
1441 ORIGINAL_REGNO (DECL_RTL (decl)) = reg_number;
1442 REG_USERVAR_P (DECL_RTL (decl)) = 1;
1443
1444 if (TREE_STATIC (decl))
1445 {
1446 /* Make this register global, so not usable for anything
1447 else. */
1448 #ifdef ASM_DECLARE_REGISTER_GLOBAL
1449 name = IDENTIFIER_POINTER (DECL_NAME (decl));
1450 ASM_DECLARE_REGISTER_GLOBAL (asm_out_file, decl, reg_number, name);
1451 #endif
1452 nregs = hard_regno_nregs (reg_number, mode);
1453 while (nregs > 0)
1454 globalize_reg (decl, reg_number + --nregs);
1455 }
1456
1457 /* As a register variable, it has no section. */
1458 return;
1459 }
1460 /* Avoid internal errors from invalid register
1461 specifications. */
1462 SET_DECL_ASSEMBLER_NAME (decl, NULL_TREE);
1463 DECL_HARD_REGISTER (decl) = 0;
1464 /* Also avoid SSA inconsistencies by pretending this is an external
1465 decl now. */
1466 DECL_EXTERNAL (decl) = 1;
1467 return;
1468 }
1469 /* Now handle ordinary static variables and functions (in memory).
1470 Also handle vars declared register invalidly. */
1471 else if (name[0] == '*')
1472 {
1473 #ifdef REGISTER_PREFIX
1474 if (strlen (REGISTER_PREFIX) != 0)
1475 {
1476 reg_number = decode_reg_name (name);
1477 if (reg_number >= 0 || reg_number == -3)
1478 error ("register name given for non-register variable %q+D", decl);
1479 }
1480 #endif
1481 }
1482
1483 /* Specifying a section attribute on a variable forces it into a
1484 non-.bss section, and thus it cannot be common. */
1485 /* FIXME: In general this code should not be necessary because
 1486 the visibility pass is doing the same work. But notice_global_symbol
 1487 is called early and it needs to make DECL_RTL to get the name.
 1488 We take care of recomputing the DECL_RTL after visibility is changed. */
1489 if (VAR_P (decl)
1490 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
1491 && DECL_SECTION_NAME (decl) != NULL
1492 && DECL_INITIAL (decl) == NULL_TREE
1493 && DECL_COMMON (decl))
1494 DECL_COMMON (decl) = 0;
1495
1496 /* Variables can't be both common and weak. */
1497 if (VAR_P (decl) && DECL_WEAK (decl))
1498 DECL_COMMON (decl) = 0;
1499
1500 if (use_object_blocks_p () && use_blocks_for_decl_p (decl))
1501 x = create_block_symbol (name, get_block_for_decl (decl), -1);
1502 else
1503 {
1504 machine_mode address_mode = Pmode;
1505 if (TREE_TYPE (decl) != error_mark_node)
1506 {
1507 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
1508 address_mode = targetm.addr_space.address_mode (as);
1509 }
1510 x = gen_rtx_SYMBOL_REF (address_mode, name);
1511 }
1512 SYMBOL_REF_WEAK (x) = DECL_WEAK (decl);
1513 SET_SYMBOL_REF_DECL (x, decl);
1514
1515 x = gen_rtx_MEM (DECL_MODE (decl), x);
1516 if (TREE_CODE (decl) != FUNCTION_DECL)
1517 set_mem_attributes (x, decl, 1);
1518 SET_DECL_RTL (decl, x);
1519
1520 /* Optionally set flags or add text to the name to record information
1521 such as that it is a function name.
1522 If the name is changed, the macro ASM_OUTPUT_LABELREF
1523 will have to know how to strip this information. */
1524 targetm.encode_section_info (decl, DECL_RTL (decl), true);
1525 }
1526
1527 /* Like make_decl_rtl, but inhibit creation of new alias sets when
1528 calling make_decl_rtl. Also, reset DECL_RTL before returning the
1529 rtl. */
1530
1531 rtx
1532 make_decl_rtl_for_debug (tree decl)
1533 {
1534 unsigned int save_aliasing_flag;
1535 rtx rtl;
1536
1537 if (DECL_RTL_SET_P (decl))
1538 return DECL_RTL (decl);
1539
1540 /* Kludge alert! Somewhere down the call chain, make_decl_rtl will
1541 call new_alias_set. If running with -fcompare-debug, sometimes
1542 we do not want to create alias sets that will throw the alias
1543 numbers off in the comparison dumps. So... clearing
1544 flag_strict_aliasing will keep new_alias_set() from creating a
1545 new set. */
1546 save_aliasing_flag = flag_strict_aliasing;
1547 flag_strict_aliasing = 0;
1548
1549 rtl = DECL_RTL (decl);
 1550 /* Reset DECL_RTL back, as various parts of the compiler expect a set
 1551 DECL_RTL to mean the decl is actually going to be output. */
1552 SET_DECL_RTL (decl, NULL);
1553
1554 flag_strict_aliasing = save_aliasing_flag;
1555 return rtl;
1556 }
1557 \f
1558 /* Output a string of literal assembler code
1559 for an `asm' keyword used between functions. */
1560
1561 void
1562 assemble_asm (tree string)
1563 {
1564 const char *p;
1565 app_enable ();
1566
1567 if (TREE_CODE (string) == ADDR_EXPR)
1568 string = TREE_OPERAND (string, 0);
1569
1570 p = TREE_STRING_POINTER (string);
1571 fprintf (asm_out_file, "%s%s\n", p[0] == '\t' ? "" : "\t", p);
1572 }
1573
1574 /* Write the address of the entity given by SYMBOL to SEC. */
1575 void
1576 assemble_addr_to_section (rtx symbol, section *sec)
1577 {
1578 switch_to_section (sec);
1579 assemble_align (POINTER_SIZE);
1580 assemble_integer (symbol, POINTER_SIZE_UNITS, POINTER_SIZE, 1);
1581 }
1582
1583 /* Return the numbered .ctors.N (if CONSTRUCTOR_P) or .dtors.N (if
1584 not) section for PRIORITY. */
1585 section *
1586 get_cdtor_priority_section (int priority, bool constructor_p)
1587 {
1588 /* Buffer conservatively large enough for the full range of a 32-bit
1589 int plus the text below. */
1590 char buf[18];
1591
1592 /* ??? This only works reliably with the GNU linker. */
1593 sprintf (buf, "%s.%.5u",
1594 constructor_p ? ".ctors" : ".dtors",
1595 /* Invert the numbering so the linker puts us in the proper
1596 order; constructors are run from right to left, and the
1597 linker sorts in increasing order. */
1598 MAX_INIT_PRIORITY - priority);
1599 return get_section (buf, SECTION_WRITE, NULL);
1600 }
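
/* Editorial example (not part of the original source): assuming
   MAX_INIT_PRIORITY is 65535, a constructor with priority 65500 goes
   into ".ctors.00035"; the inverted, zero-padded numbering combined
   with the linker's ascending sort and the right-to-left execution of
   .ctors yields the intended construction order.  */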
1601
1602 void
1603 default_named_section_asm_out_destructor (rtx symbol, int priority)
1604 {
1605 section *sec;
1606
1607 if (priority != DEFAULT_INIT_PRIORITY)
1608 sec = get_cdtor_priority_section (priority,
1609 /*constructor_p=*/false);
1610 else
1611 sec = get_section (".dtors", SECTION_WRITE, NULL);
1612
1613 assemble_addr_to_section (symbol, sec);
1614 }
1615
1616 #ifdef DTORS_SECTION_ASM_OP
1617 void
1618 default_dtor_section_asm_out_destructor (rtx symbol,
1619 int priority ATTRIBUTE_UNUSED)
1620 {
1621 assemble_addr_to_section (symbol, dtors_section);
1622 }
1623 #endif
1624
1625 void
1626 default_named_section_asm_out_constructor (rtx symbol, int priority)
1627 {
1628 section *sec;
1629
1630 if (priority != DEFAULT_INIT_PRIORITY)
1631 sec = get_cdtor_priority_section (priority,
1632 /*constructor_p=*/true);
1633 else
1634 sec = get_section (".ctors", SECTION_WRITE, NULL);
1635
1636 assemble_addr_to_section (symbol, sec);
1637 }
1638
1639 #ifdef CTORS_SECTION_ASM_OP
1640 void
1641 default_ctor_section_asm_out_constructor (rtx symbol,
1642 int priority ATTRIBUTE_UNUSED)
1643 {
1644 assemble_addr_to_section (symbol, ctors_section);
1645 }
1646 #endif
1647 \f
1648 /* CONSTANT_POOL_BEFORE_FUNCTION may be defined as an expression with
1649 a nonzero value if the constant pool should be output before the
 1650 start of the function, or a zero value if the pool should be output
1651 after the end of the function. The default is to put it before the
1652 start. */
1653
1654 #ifndef CONSTANT_POOL_BEFORE_FUNCTION
1655 #define CONSTANT_POOL_BEFORE_FUNCTION 1
1656 #endif
1657
1658 /* DECL is an object (either VAR_DECL or FUNCTION_DECL) which is going
1659 to be output to assembler.
1660 Set first_global_object_name and weak_global_object_name as appropriate. */
1661
1662 void
1663 notice_global_symbol (tree decl)
1664 {
1665 const char **t = &first_global_object_name;
1666
1667 if (first_global_object_name
1668 || !TREE_PUBLIC (decl)
1669 || DECL_EXTERNAL (decl)
1670 || !DECL_NAME (decl)
1671 || (VAR_P (decl) && DECL_HARD_REGISTER (decl))
1672 || (TREE_CODE (decl) != FUNCTION_DECL
1673 && (!VAR_P (decl)
1674 || (DECL_COMMON (decl)
1675 && (DECL_INITIAL (decl) == 0
1676 || DECL_INITIAL (decl) == error_mark_node)))))
1677 return;
1678
 1679 /* We win when a global object is found, but it is useful to know about weak
 1680 symbols as well so we can produce nicer unique names. */
1681 if (DECL_WEAK (decl) || DECL_ONE_ONLY (decl) || flag_shlib)
1682 t = &weak_global_object_name;
1683
1684 if (!*t)
1685 {
1686 tree id = DECL_ASSEMBLER_NAME (decl);
1687 ultimate_transparent_alias_target (&id);
1688 *t = ggc_strdup (targetm.strip_name_encoding (IDENTIFIER_POINTER (id)));
1689 }
1690 }
1691
1692 /* If not using flag_reorder_blocks_and_partition, decide early whether the
1693 current function goes into the cold section, so that targets can use
1694 current_function_section during RTL expansion. DECL describes the
1695 function. */
1696
1697 void
1698 decide_function_section (tree decl)
1699 {
1700 first_function_block_is_cold = false;
1701
1702 if (DECL_SECTION_NAME (decl))
1703 {
1704 struct cgraph_node *node = cgraph_node::get (current_function_decl);
1705 /* Calls to function_section rely on first_function_block_is_cold
1706 being accurate. */
1707 first_function_block_is_cold = (node
1708 && node->frequency
1709 == NODE_FREQUENCY_UNLIKELY_EXECUTED);
1710 }
1711
1712 in_cold_section_p = first_function_block_is_cold;
1713 }
1714
1715 /* Get the function's name, as described by its RTL. This may be
1716 different from the DECL_NAME name used in the source file. */
1717 const char *
1718 get_fnname_from_decl (tree decl)
1719 {
1720 rtx x = DECL_RTL (decl);
1721 gcc_assert (MEM_P (x));
1722 x = XEXP (x, 0);
1723 gcc_assert (GET_CODE (x) == SYMBOL_REF);
1724 return XSTR (x, 0);
1725 }
1726
 1727 /* Output the assembler code for a function's constant pool and the code
 1728 associated with defining the function's name. DECL describes the function.
1729 NAME is the function's name. For the constant pool, we use the current
1730 constant pool data. */
1731
1732 void
1733 assemble_start_function (tree decl, const char *fnname)
1734 {
1735 int align;
1736 char tmp_label[100];
1737 bool hot_label_written = false;
1738
1739 if (crtl->has_bb_partition)
1740 {
1741 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LHOTB", const_labelno);
1742 crtl->subsections.hot_section_label = ggc_strdup (tmp_label);
1743 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LCOLDB", const_labelno);
1744 crtl->subsections.cold_section_label = ggc_strdup (tmp_label);
1745 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LHOTE", const_labelno);
1746 crtl->subsections.hot_section_end_label = ggc_strdup (tmp_label);
1747 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LCOLDE", const_labelno);
1748 crtl->subsections.cold_section_end_label = ggc_strdup (tmp_label);
1749 const_labelno++;
1750 cold_function_name = NULL_TREE;
1751 }
1752 else
1753 {
1754 crtl->subsections.hot_section_label = NULL;
1755 crtl->subsections.cold_section_label = NULL;
1756 crtl->subsections.hot_section_end_label = NULL;
1757 crtl->subsections.cold_section_end_label = NULL;
1758 }
1759
1760 /* The following code does not need preprocessing in the assembler. */
1761
1762 app_disable ();
1763
1764 if (CONSTANT_POOL_BEFORE_FUNCTION)
1765 output_constant_pool (fnname, decl);
1766
1767 align = symtab_node::get (decl)->definition_alignment ();
1768
1769 /* Make sure the hot and cold text (code) sections are properly
1770 aligned. This is necessary here in the case where the function
1771 has both hot and cold sections, because we don't want to re-set
1772 the alignment when the section switch happens mid-function. */
1773
1774 if (crtl->has_bb_partition)
1775 {
1776 first_function_block_is_cold = false;
1777
1778 switch_to_section (unlikely_text_section ());
1779 assemble_align (align);
1780 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.cold_section_label);
1781
1782 /* When the function starts with a cold section, we need to explicitly
1783 align the hot section and write out the hot section label.
1784 But if the current function is a thunk, we do not have a CFG. */
1785 if (!cfun->is_thunk
1786 && BB_PARTITION (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) == BB_COLD_PARTITION)
1787 {
1788 switch_to_section (text_section);
1789 assemble_align (align);
1790 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_label);
1791 hot_label_written = true;
1792 first_function_block_is_cold = true;
1793 }
1794 in_cold_section_p = first_function_block_is_cold;
1795 }
1796
1797
1798 /* Switch to the correct text section for the start of the function. */
1799
1800 switch_to_section (function_section (decl));
1801 if (crtl->has_bb_partition && !hot_label_written)
1802 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_label);
1803
1804 /* Tell assembler to move to target machine's alignment for functions. */
1805 align = floor_log2 (align / BITS_PER_UNIT);
1806 if (align > 0)
1807 {
1808 ASM_OUTPUT_ALIGN (asm_out_file, align);
1809 }
1810
1811 /* Apply the -falign-functions alignment unless the user specified the
1812 alignment explicitly. Note that we still need to align to DECL_ALIGN,
1813 as above, because ASM_OUTPUT_MAX_SKIP_ALIGN might not do any alignment at all. */
1814 if (! DECL_USER_ALIGN (decl)
1815 && align_functions.levels[0].log > align
1816 && optimize_function_for_speed_p (cfun))
1817 {
1818 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1819 int align_log = align_functions.levels[0].log;
1820 #endif
1821 int max_skip = align_functions.levels[0].maxskip;
1822 if (flag_limit_function_alignment && crtl->max_insn_address > 0
1823 && max_skip >= crtl->max_insn_address)
1824 max_skip = crtl->max_insn_address - 1;
1825
1826 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1827 ASM_OUTPUT_MAX_SKIP_ALIGN (asm_out_file, align_log, max_skip);
1828 if (max_skip == align_functions.levels[0].maxskip)
1829 ASM_OUTPUT_MAX_SKIP_ALIGN (asm_out_file,
1830 align_functions.levels[1].log,
1831 align_functions.levels[1].maxskip);
1832 #else
1833 ASM_OUTPUT_ALIGN (asm_out_file, align_functions.levels[0].log);
1834 #endif
1835 }
1836
1837 #ifdef ASM_OUTPUT_FUNCTION_PREFIX
1838 ASM_OUTPUT_FUNCTION_PREFIX (asm_out_file, fnname);
1839 #endif
1840
1841 if (!DECL_IGNORED_P (decl))
1842 (*debug_hooks->begin_function) (decl);
1843
1844 /* Make function name accessible from other files, if appropriate. */
1845
1846 if (TREE_PUBLIC (decl))
1847 {
1848 notice_global_symbol (decl);
1849
1850 globalize_decl (decl);
1851
1852 maybe_assemble_visibility (decl);
1853 }
1854
1855 if (DECL_PRESERVE_P (decl))
1856 targetm.asm_out.mark_decl_preserved (fnname);
1857
1858 unsigned HOST_WIDE_INT patch_area_size = function_entry_patch_area_size;
1859 unsigned HOST_WIDE_INT patch_area_entry = function_entry_patch_area_start;
1860
1861 tree patchable_function_entry_attr
1862 = lookup_attribute ("patchable_function_entry", DECL_ATTRIBUTES (decl));
1863 if (patchable_function_entry_attr)
1864 {
1865 tree pp_val = TREE_VALUE (patchable_function_entry_attr);
1866 tree patchable_function_entry_value1 = TREE_VALUE (pp_val);
1867
1868 patch_area_size = tree_to_uhwi (patchable_function_entry_value1);
1869 patch_area_entry = 0;
1870 if (TREE_CHAIN (pp_val) != NULL_TREE)
1871 {
1872 tree patchable_function_entry_value2
1873 = TREE_VALUE (TREE_CHAIN (pp_val));
1874 patch_area_entry = tree_to_uhwi (patchable_function_entry_value2);
1875 }
1876 }
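  /* Illustrative sketch only (hypothetical user code, not part of this
     file): under the handling above, the declaration below would give
     patch_area_size == 5 and patch_area_entry == 2, i.e. two NOPs are
     emitted before the function's entry label and three after it.  */
#if 0
  extern void f (void) __attribute__ ((patchable_function_entry (5, 2)));
#endif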
1877
1878 if (patch_area_entry > patch_area_size)
1879 {
1880 if (patch_area_size > 0)
1881 warning (OPT_Wattributes,
1882 "patchable function entry %wu exceeds size %wu",
1883 patch_area_entry, patch_area_size);
1884 patch_area_entry = 0;
1885 }
1886
1887 /* Emit the patching area before the entry label, if any. */
1888 if (patch_area_entry > 0)
1889 targetm.asm_out.print_patchable_function_entry (asm_out_file,
1890 patch_area_entry, true);
1891
1892 /* Do any machine/system dependent processing of the function name. */
1893 #ifdef ASM_DECLARE_FUNCTION_NAME
1894 ASM_DECLARE_FUNCTION_NAME (asm_out_file, fnname, current_function_decl);
1895 #else
1896 /* The standard thing is just to output a label for the function. */
1897 ASM_OUTPUT_FUNCTION_LABEL (asm_out_file, fnname, current_function_decl);
1898 #endif /* ASM_DECLARE_FUNCTION_NAME */
1899
1900 /* And the area after the label. Record it if we haven't done so yet. */
1901 if (patch_area_size > patch_area_entry)
1902 targetm.asm_out.print_patchable_function_entry (asm_out_file,
1903 patch_area_size
1904 - patch_area_entry,
1905 patch_area_entry == 0);
1906
1907 if (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (decl)))
1908 saw_no_split_stack = true;
1909 }
1910
1911 /* Output the assembler code associated with defining the size of the
1912 function. DECL describes the function. FNNAME is the function's name. */
1913
1914 void
1915 assemble_end_function (tree decl, const char *fnname ATTRIBUTE_UNUSED)
1916 {
1917 #ifdef ASM_DECLARE_FUNCTION_SIZE
1918 /* We could have switched section in the middle of the function. */
1919 if (crtl->has_bb_partition)
1920 switch_to_section (function_section (decl));
1921 ASM_DECLARE_FUNCTION_SIZE (asm_out_file, fnname, decl);
1922 #endif
1923 if (! CONSTANT_POOL_BEFORE_FUNCTION)
1924 {
1925 output_constant_pool (fnname, decl);
1926 switch_to_section (function_section (decl)); /* need to switch back */
1927 }
1928 /* Output labels for the end of the hot/cold text sections (to be used
1929 by debug info). */
1930 if (crtl->has_bb_partition)
1931 {
1932 section *save_text_section;
1933
1934 save_text_section = in_section;
1935 switch_to_section (unlikely_text_section ());
1936 #ifdef ASM_DECLARE_COLD_FUNCTION_SIZE
1937 if (cold_function_name != NULL_TREE)
1938 ASM_DECLARE_COLD_FUNCTION_SIZE (asm_out_file,
1939 IDENTIFIER_POINTER (cold_function_name),
1940 decl);
1941 #endif
1942 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.cold_section_end_label);
1943 if (first_function_block_is_cold)
1944 switch_to_section (text_section);
1945 else
1946 switch_to_section (function_section (decl));
1947 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_end_label);
1948 switch_to_section (save_text_section);
1949 }
1950 }
1951 \f
1952 /* Assemble code to leave SIZE bytes of zeros. */
1953
1954 void
1955 assemble_zeros (unsigned HOST_WIDE_INT size)
1956 {
1957 /* Do no output if -fsyntax-only. */
1958 if (flag_syntax_only)
1959 return;
1960
1961 #ifdef ASM_NO_SKIP_IN_TEXT
1962 /* The `space' pseudo in the text section outputs nop insns rather than 0s,
1963 so we must output 0s explicitly in the text section. */
1964 if (ASM_NO_SKIP_IN_TEXT && (in_section->common.flags & SECTION_CODE) != 0)
1965 {
1966 unsigned HOST_WIDE_INT i;
1967 for (i = 0; i < size; i++)
1968 assemble_integer (const0_rtx, 1, BITS_PER_UNIT, 1);
1969 }
1970 else
1971 #endif
1972 if (size > 0)
1973 ASM_OUTPUT_SKIP (asm_out_file, size);
1974 }
1975
1976 /* Assemble an alignment pseudo op for an ALIGN-bit boundary. */
1977
1978 void
1979 assemble_align (unsigned int align)
1980 {
1981 if (align > BITS_PER_UNIT)
1982 {
1983 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
1984 }
1985 }
1986
1987 /* Assemble a string constant with the specified C string as contents. */
1988
1989 void
1990 assemble_string (const char *p, int size)
1991 {
1992 int pos = 0;
1993 int maximum = 2000;
1994
1995 /* If the string is very long, split it up. */
1996
1997 while (pos < size)
1998 {
1999 int thissize = size - pos;
2000 if (thissize > maximum)
2001 thissize = maximum;
2002
2003 ASM_OUTPUT_ASCII (asm_out_file, p, thissize);
2004
2005 pos += thissize;
2006 p += thissize;
2007 }
2008 }
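/* For example, a 4500-byte string is emitted by the loop above as three
   ASM_OUTPUT_ASCII chunks of 2000, 2000 and 500 bytes.  */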
2009
2010 \f
2011 /* A noswitch_section_callback for lcomm_section. */
2012
2013 static bool
2014 emit_local (tree decl ATTRIBUTE_UNUSED,
2015 const char *name ATTRIBUTE_UNUSED,
2016 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
2017 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
2018 {
2019 #if defined ASM_OUTPUT_ALIGNED_DECL_LOCAL
2020 unsigned int align = symtab_node::get (decl)->definition_alignment ();
2021 ASM_OUTPUT_ALIGNED_DECL_LOCAL (asm_out_file, decl, name,
2022 size, align);
2023 return true;
2024 #elif defined ASM_OUTPUT_ALIGNED_LOCAL
2025 unsigned int align = symtab_node::get (decl)->definition_alignment ();
2026 ASM_OUTPUT_ALIGNED_LOCAL (asm_out_file, name, size, align);
2027 return true;
2028 #else
2029 ASM_OUTPUT_LOCAL (asm_out_file, name, size, rounded);
2030 return false;
2031 #endif
2032 }
2033
2034 /* A noswitch_section_callback for bss_noswitch_section. */
2035
2036 #if defined ASM_OUTPUT_ALIGNED_BSS
2037 static bool
2038 emit_bss (tree decl ATTRIBUTE_UNUSED,
2039 const char *name ATTRIBUTE_UNUSED,
2040 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
2041 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
2042 {
2043 ASM_OUTPUT_ALIGNED_BSS (asm_out_file, decl, name, size,
2044 get_variable_align (decl));
2045 return true;
2046 }
2047 #endif
2048
2049 /* A noswitch_section_callback for comm_section. */
2050
2051 static bool
2052 emit_common (tree decl ATTRIBUTE_UNUSED,
2053 const char *name ATTRIBUTE_UNUSED,
2054 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
2055 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
2056 {
2057 #if defined ASM_OUTPUT_ALIGNED_DECL_COMMON
2058 ASM_OUTPUT_ALIGNED_DECL_COMMON (asm_out_file, decl, name,
2059 size, get_variable_align (decl));
2060 return true;
2061 #elif defined ASM_OUTPUT_ALIGNED_COMMON
2062 ASM_OUTPUT_ALIGNED_COMMON (asm_out_file, name, size,
2063 get_variable_align (decl));
2064 return true;
2065 #else
2066 ASM_OUTPUT_COMMON (asm_out_file, name, size, rounded);
2067 return false;
2068 #endif
2069 }
2070
2071 /* A noswitch_section_callback for tls_comm_section. */
2072
2073 static bool
2074 emit_tls_common (tree decl ATTRIBUTE_UNUSED,
2075 const char *name ATTRIBUTE_UNUSED,
2076 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
2077 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
2078 {
2079 #ifdef ASM_OUTPUT_TLS_COMMON
2080 ASM_OUTPUT_TLS_COMMON (asm_out_file, decl, name, size);
2081 return true;
2082 #else
2083 sorry ("thread-local COMMON data not implemented");
2084 return true;
2085 #endif
2086 }
2087
2088 /* Assemble DECL given that it belongs in SECTION_NOSWITCH section SECT.
2089 NAME is the name of DECL's SYMBOL_REF. */
2090
2091 static void
2092 assemble_noswitch_variable (tree decl, const char *name, section *sect,
2093 unsigned int align)
2094 {
2095 unsigned HOST_WIDE_INT size, rounded;
2096
2097 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2098 rounded = size;
2099
2100 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_protect_global (decl))
2101 size += asan_red_zone_size (size);
2102
2103 /* Don't allocate zero bytes of common,
2104 since that means "undefined external" in the linker. */
2105 if (size == 0)
2106 rounded = 1;
2107
2108 /* Round size up to multiple of BIGGEST_ALIGNMENT bits
2109 so that each uninitialized object starts on such a boundary. */
2110 rounded += (BIGGEST_ALIGNMENT / BITS_PER_UNIT) - 1;
2111 rounded = (rounded / (BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2112 * (BIGGEST_ALIGNMENT / BITS_PER_UNIT));
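  /* Worked example (illustrative numbers), assuming BIGGEST_ALIGNMENT is
     128 bits (16 bytes): a 10-byte object gives rounded = 10 + 15 = 25,
     then 25 / 16 * 16 = 16, so the block is padded out to 16 bytes.  */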
2113
2114 if (!sect->noswitch.callback (decl, name, size, rounded)
2115 && (unsigned HOST_WIDE_INT) (align / BITS_PER_UNIT) > rounded)
2116 error ("requested alignment for %q+D is greater than "
2117 "implemented alignment of %wu", decl, rounded);
2118 }
2119
2120 /* A subroutine of assemble_variable. Output the label and contents of
2121 DECL, whose address is a SYMBOL_REF with name NAME. DONT_OUTPUT_DATA
2122 is as for assemble_variable. */
2123
2124 static void
2125 assemble_variable_contents (tree decl, const char *name,
2126 bool dont_output_data, bool merge_strings)
2127 {
2128 /* Do any machine/system dependent processing of the object. */
2129 #ifdef ASM_DECLARE_OBJECT_NAME
2130 last_assemble_variable_decl = decl;
2131 ASM_DECLARE_OBJECT_NAME (asm_out_file, name, decl);
2132 #else
2133 /* The standard thing is just to output a label for the object. */
2134 ASM_OUTPUT_LABEL (asm_out_file, name);
2135 #endif /* ASM_DECLARE_OBJECT_NAME */
2136
2137 if (!dont_output_data)
2138 {
2139 /* The caller is supposed to use varpool_node::get_constructor when it
2140 wants to output the body. */
2141 gcc_assert (!in_lto_p || DECL_INITIAL (decl) != error_mark_node);
2142 if (DECL_INITIAL (decl)
2143 && DECL_INITIAL (decl) != error_mark_node
2144 && !initializer_zerop (DECL_INITIAL (decl)))
2145 /* Output the actual data. */
2146 output_constant (DECL_INITIAL (decl),
2147 tree_to_uhwi (DECL_SIZE_UNIT (decl)),
2148 get_variable_align (decl),
2149 false, merge_strings);
2150 else
2151 /* Leave space for it. */
2152 assemble_zeros (tree_to_uhwi (DECL_SIZE_UNIT (decl)));
2153 targetm.asm_out.decl_end ();
2154 }
2155 }
2156
2157 /* Write out assembly for the variable DECL, which is not defined in
2158 the current translation unit. */
2159 void
2160 assemble_undefined_decl (tree decl)
2161 {
2162 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
2163 targetm.asm_out.assemble_undefined_decl (asm_out_file, name, decl);
2164 }
2165
2166 /* Assemble everything that is needed for a variable or function declaration.
2167 Not used for automatic variables, and not used for function definitions.
2168 Should not be called for variables of incomplete structure type.
2169
2170 TOP_LEVEL is nonzero if this variable has file scope.
2171 AT_END is nonzero if this is the special handling, at end of compilation,
2172 to define things that have had only tentative definitions.
2173 DONT_OUTPUT_DATA if nonzero means don't actually output the
2174 initial value (that will be done by the caller). */
2175
2176 void
2177 assemble_variable (tree decl, int top_level ATTRIBUTE_UNUSED,
2178 int at_end ATTRIBUTE_UNUSED, int dont_output_data)
2179 {
2180 const char *name;
2181 rtx decl_rtl, symbol;
2182 section *sect;
2183 unsigned int align;
2184 bool asan_protected = false;
2185
2186 /* This function is supposed to handle VARIABLES. Ensure we have one. */
2187 gcc_assert (VAR_P (decl));
2188
2189 /* Emulated TLS had better not get this far. */
2190 gcc_checking_assert (targetm.have_tls || !DECL_THREAD_LOCAL_P (decl));
2191
2192 last_assemble_variable_decl = 0;
2193
2194 /* Normally no need to say anything here for external references,
2195 since assemble_external is called by the language-specific code
2196 when a declaration is first seen. */
2197
2198 if (DECL_EXTERNAL (decl))
2199 return;
2200
2201 /* Do nothing for global register variables. */
2202 if (DECL_RTL_SET_P (decl) && REG_P (DECL_RTL (decl)))
2203 {
2204 TREE_ASM_WRITTEN (decl) = 1;
2205 return;
2206 }
2207
2208 /* If type was incomplete when the variable was declared,
2209 see if it is complete now. */
2210
2211 if (DECL_SIZE (decl) == 0)
2212 layout_decl (decl, 0);
2213
2214 /* Still incomplete => don't allocate it; treat the tentative defn
2215 (which is what it must have been) as an `extern' reference. */
2216
2217 if (!dont_output_data && DECL_SIZE (decl) == 0)
2218 {
2219 error ("storage size of %q+D isn%'t known", decl);
2220 TREE_ASM_WRITTEN (decl) = 1;
2221 return;
2222 }
2223
2224 /* The first declaration of a variable that comes through this function
2225 decides whether it is global (in C, has external linkage)
2226 or local (in C, has internal linkage). So do nothing more
2227 if this function has already run. */
2228
2229 if (TREE_ASM_WRITTEN (decl))
2230 return;
2231
2232 /* Make sure targetm.encode_section_info is invoked before we set
2233 ASM_WRITTEN. */
2234 decl_rtl = DECL_RTL (decl);
2235
2236 TREE_ASM_WRITTEN (decl) = 1;
2237
2238 /* Do no output if -fsyntax-only. */
2239 if (flag_syntax_only)
2240 return;
2241
2242 if (! dont_output_data
2243 && ! valid_constant_size_p (DECL_SIZE_UNIT (decl)))
2244 {
2245 error ("size of variable %q+D is too large", decl);
2246 return;
2247 }
2248
2249 gcc_assert (MEM_P (decl_rtl));
2250 gcc_assert (GET_CODE (XEXP (decl_rtl, 0)) == SYMBOL_REF);
2251 symbol = XEXP (decl_rtl, 0);
2252
2253 /* If this symbol belongs to the tree constant pool, output the constant
2254 if it hasn't already been written. */
2255 if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
2256 {
2257 tree decl = SYMBOL_REF_DECL (symbol);
2258 if (!TREE_ASM_WRITTEN (DECL_INITIAL (decl)))
2259 output_constant_def_contents (symbol);
2260 return;
2261 }
2262
2263 app_disable ();
2264
2265 name = XSTR (symbol, 0);
2266 if (TREE_PUBLIC (decl) && DECL_NAME (decl))
2267 notice_global_symbol (decl);
2268
2269 /* Compute the alignment of this data. */
2270
2271 align_variable (decl, dont_output_data);
2272
2273 if ((flag_sanitize & SANITIZE_ADDRESS)
2274 && asan_protect_global (decl))
2275 {
2276 asan_protected = true;
2277 SET_DECL_ALIGN (decl, MAX (DECL_ALIGN (decl),
2278 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT));
2279 }
2280
2281 set_mem_align (decl_rtl, DECL_ALIGN (decl));
2282
2283 align = get_variable_align (decl);
2284
2285 if (TREE_PUBLIC (decl))
2286 maybe_assemble_visibility (decl);
2287
2288 if (DECL_PRESERVE_P (decl))
2289 targetm.asm_out.mark_decl_preserved (name);
2290
2291 /* First make the assembler name(s) global if appropriate. */
2292 sect = get_variable_section (decl, false);
2293 if (TREE_PUBLIC (decl)
2294 && (sect->common.flags & SECTION_COMMON) == 0)
2295 globalize_decl (decl);
2296
2297 /* Output any data that we will need to use the address of. */
2298 if (DECL_INITIAL (decl) && DECL_INITIAL (decl) != error_mark_node)
2299 output_addressed_constants (DECL_INITIAL (decl));
2300
2301 /* dbxout.c needs to know this. */
2302 if (sect && (sect->common.flags & SECTION_CODE) != 0)
2303 DECL_IN_TEXT_SECTION (decl) = 1;
2304
2305 /* If the decl is part of an object_block, make sure that the decl
2306 has been positioned within its block, but do not write out its
2307 definition yet. output_object_blocks will do that later. */
2308 if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol) && SYMBOL_REF_BLOCK (symbol))
2309 {
2310 gcc_assert (!dont_output_data);
2311 place_block_symbol (symbol);
2312 }
2313 else if (SECTION_STYLE (sect) == SECTION_NOSWITCH)
2314 assemble_noswitch_variable (decl, name, sect, align);
2315 else
2316 {
2317 /* Special-case handling of vtv comdat sections. */
2318 if (sect->named.name
2319 && (strcmp (sect->named.name, ".vtable_map_vars") == 0))
2320 handle_vtv_comdat_section (sect, decl);
2321 else
2322 switch_to_section (sect);
2323 if (align > BITS_PER_UNIT)
2324 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
2325 assemble_variable_contents (decl, name, dont_output_data,
2326 (sect->common.flags & SECTION_MERGE)
2327 && (sect->common.flags & SECTION_STRINGS));
2328 if (asan_protected)
2329 {
2330 unsigned HOST_WIDE_INT size
2331 = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2332 assemble_zeros (asan_red_zone_size (size));
2333 }
2334 }
2335 }
2336
2337
2338 /* Given a function declaration (FN_DECL), this function assembles the
2339 function into the .preinit_array section. */
2340
2341 void
2342 assemble_vtv_preinit_initializer (tree fn_decl)
2343 {
2344 section *sect;
2345 unsigned flags = SECTION_WRITE;
2346 rtx symbol = XEXP (DECL_RTL (fn_decl), 0);
2347
2348 flags |= SECTION_NOTYPE;
2349 sect = get_section (".preinit_array", flags, fn_decl);
2350 switch_to_section (sect);
2351 assemble_addr_to_section (symbol, sect);
2352 }
2353
2354 /* Return 1 if type TYPE contains any pointers. */
2355
2356 static int
2357 contains_pointers_p (tree type)
2358 {
2359 switch (TREE_CODE (type))
2360 {
2361 case POINTER_TYPE:
2362 case REFERENCE_TYPE:
2363 /* I'm not sure whether OFFSET_TYPE needs this treatment,
2364 so I'll play safe and return 1. */
2365 case OFFSET_TYPE:
2366 return 1;
2367
2368 case RECORD_TYPE:
2369 case UNION_TYPE:
2370 case QUAL_UNION_TYPE:
2371 {
2372 tree fields;
2373 /* For a type that has fields, see if the fields have pointers. */
2374 for (fields = TYPE_FIELDS (type); fields; fields = DECL_CHAIN (fields))
2375 if (TREE_CODE (fields) == FIELD_DECL
2376 && contains_pointers_p (TREE_TYPE (fields)))
2377 return 1;
2378 return 0;
2379 }
2380
2381 case ARRAY_TYPE:
2382 /* An array type contains pointers if its element type does. */
2383 return contains_pointers_p (TREE_TYPE (type));
2384
2385 default:
2386 return 0;
2387 }
2388 }
2389
2390 /* We delay assemble_external processing until
2391 the compilation unit is finalized. This is the best we can do for
2392 right now (i.e. stage 3 of GCC 4.0) - the right thing is to delay
2393 it all the way to final. See PR 17982 for further discussion. */
2394 static GTY(()) tree pending_assemble_externals;
2395
2396 #ifdef ASM_OUTPUT_EXTERNAL
2397 /* Some targets delay some output to final using TARGET_ASM_FILE_END.
2398 As a result, assemble_external can be called after the list of externals
2399 is processed and the pointer set destroyed. */
2400 static bool pending_assemble_externals_processed;
2401
2402 /* Avoid O(external_decls**2) lookups in the pending_assemble_externals
2403 TREE_LIST in assemble_external. */
2404 static hash_set<tree> *pending_assemble_externals_set;
2405
2406 /* True if DECL is a function decl for which no out-of-line copy exists.
2407 It is assumed that DECL's assembler name has been set. */
2408
2409 static bool
2410 incorporeal_function_p (tree decl)
2411 {
2412 if (TREE_CODE (decl) == FUNCTION_DECL && fndecl_built_in_p (decl))
2413 {
2414 const char *name;
2415
2416 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2417 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
2418 return true;
2419
2420 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2421 /* Atomic or sync builtins which have survived this far will be
2422 resolved externally and therefore are not incorporeal. */
2423 if (strncmp (name, "__builtin_", 10) == 0)
2424 return true;
2425 }
2426 return false;
2427 }
2428
2429 /* Actually do the tests to determine if this is necessary, and invoke
2430 ASM_OUTPUT_EXTERNAL. */
2431 static void
2432 assemble_external_real (tree decl)
2433 {
2434 rtx rtl = DECL_RTL (decl);
2435
2436 if (MEM_P (rtl) && GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF
2437 && !SYMBOL_REF_USED (XEXP (rtl, 0))
2438 && !incorporeal_function_p (decl))
2439 {
2440 /* Some systems do require some output. */
2441 SYMBOL_REF_USED (XEXP (rtl, 0)) = 1;
2442 ASM_OUTPUT_EXTERNAL (asm_out_file, decl, XSTR (XEXP (rtl, 0), 0));
2443 }
2444 }
2445 #endif
2446
2447 void
2448 process_pending_assemble_externals (void)
2449 {
2450 #ifdef ASM_OUTPUT_EXTERNAL
2451 tree list;
2452 for (list = pending_assemble_externals; list; list = TREE_CHAIN (list))
2453 assemble_external_real (TREE_VALUE (list));
2454
2455 pending_assemble_externals = 0;
2456 pending_assemble_externals_processed = true;
2457 delete pending_assemble_externals_set;
2458 #endif
2459 }
2460
2461 /* This TREE_LIST contains any weak symbol declarations waiting
2462 to be emitted. */
2463 static GTY(()) tree weak_decls;
2464
2465 /* Output something to declare an external symbol to the assembler,
2466 and qualifiers such as weakness. (Most assemblers don't need an
2467 extern declaration, so we normally output nothing.) Do nothing if
2468 DECL is not external. */
2469
2470 void
2471 assemble_external (tree decl ATTRIBUTE_UNUSED)
2472 {
2473 /* Make sure that the ASM_OUT_FILE is open.
2474 If it's not, we should not be calling this function. */
2475 gcc_assert (asm_out_file);
2476
2477 /* In a perfect world, the following condition would be true.
2478 Sadly, the Go front end emits assembly *from the front end*,
2479 bypassing the call graph. See PR52739. Fix before GCC 4.8. */
2480 #if 0
2481 /* This function should only be called if we are expanding, or have
2482 expanded, to RTL.
2483 Ideally, only final.c would be calling this function, but it is
2484 not clear whether that would break things somehow. See PR 17982
2485 for further discussion. */
2486 gcc_assert (state == EXPANSION
2487 || state == FINISHED);
2488 #endif
2489
2490 if (!DECL_P (decl) || !DECL_EXTERNAL (decl) || !TREE_PUBLIC (decl))
2491 return;
2492
2493 /* We want to output the annotations for weak and external symbols
2494 last, so that we can check whether they are referenced or not. */
2495
2496 if (TARGET_SUPPORTS_WEAK
2497 && DECL_WEAK (decl)
2498 /* TREE_STATIC is a weird and abused creature which is not
2499 generally the right test for whether an entity has been
2500 locally emitted, inlined or otherwise not-really-extern, but
2501 for declarations that can be weak, it happens to
2502 match. */
2503 && !TREE_STATIC (decl)
2504 && lookup_attribute ("weak", DECL_ATTRIBUTES (decl))
2505 && value_member (decl, weak_decls) == NULL_TREE)
2506 weak_decls = tree_cons (NULL, decl, weak_decls);
2507
2508 #ifdef ASM_OUTPUT_EXTERNAL
2509 if (pending_assemble_externals_processed)
2510 {
2511 assemble_external_real (decl);
2512 return;
2513 }
2514
2515 if (! pending_assemble_externals_set->add (decl))
2516 pending_assemble_externals = tree_cons (NULL, decl,
2517 pending_assemble_externals);
2518 #endif
2519 }
2520
2521 /* Similar, for calling a library function FUN. */
2522
2523 void
2524 assemble_external_libcall (rtx fun)
2525 {
2526 /* Declare the library function name external when first used, if necessary. */
2527 if (! SYMBOL_REF_USED (fun))
2528 {
2529 SYMBOL_REF_USED (fun) = 1;
2530 targetm.asm_out.external_libcall (fun);
2531 }
2532 }
2533
2534 /* Assemble a label named NAME. */
2535
2536 void
2537 assemble_label (FILE *file, const char *name)
2538 {
2539 ASM_OUTPUT_LABEL (file, name);
2540 }
2541
2542 /* Set the symbol_referenced flag for ID. */
2543 void
2544 mark_referenced (tree id)
2545 {
2546 TREE_SYMBOL_REFERENCED (id) = 1;
2547 }
2548
2549 /* Set the symbol_referenced flag for DECL and notify callgraph. */
2550 void
2551 mark_decl_referenced (tree decl)
2552 {
2553 if (TREE_CODE (decl) == FUNCTION_DECL)
2554 {
2555 /* Extern inline functions don't become needed when referenced.
2556 If we know a method will be emitted in another TU and no new
2557 functions can be marked reachable, just use the external
2558 definition. */
2559 struct cgraph_node *node = cgraph_node::get_create (decl);
2560 if (!DECL_EXTERNAL (decl)
2561 && !node->definition)
2562 node->mark_force_output ();
2563 }
2564 else if (VAR_P (decl))
2565 {
2566 varpool_node *node = varpool_node::get_create (decl);
2567 /* The C++ front end uses mark_decl_referenced to force COMDAT variables
2568 that might otherwise appear dead to be output. */
2569 node->force_output = true;
2570 }
2571 /* else do nothing - we can get various sorts of CST nodes here,
2572 which do not need to be marked. */
2573 }
2574
2575
2576 /* Output to FILE (an assembly file) a reference to NAME. If NAME
2577 starts with a *, the rest of NAME is output verbatim. Otherwise
2578 NAME is transformed in a target-specific way (usually by the
2579 addition of an underscore). */
2580
2581 void
2582 assemble_name_raw (FILE *file, const char *name)
2583 {
2584 if (name[0] == '*')
2585 fputs (&name[1], file);
2586 else
2587 ASM_OUTPUT_LABELREF (file, name);
2588 }
2589
2590 /* Like assemble_name_raw, but should be used when NAME might refer to
2591 an entity that is also represented as a tree (like a function or
2592 variable). If NAME does refer to such an entity, that entity will
2593 be marked as referenced. */
2594
2595 void
2596 assemble_name (FILE *file, const char *name)
2597 {
2598 const char *real_name;
2599 tree id;
2600
2601 real_name = targetm.strip_name_encoding (name);
2602
2603 id = maybe_get_identifier (real_name);
2604 if (id)
2605 {
2606 tree id_orig = id;
2607
2608 mark_referenced (id);
2609 ultimate_transparent_alias_target (&id);
2610 if (id != id_orig)
2611 name = IDENTIFIER_POINTER (id);
2612 gcc_assert (! TREE_CHAIN (id));
2613 }
2614
2615 assemble_name_raw (file, name);
2616 }
2617
2618 /* Allocate SIZE bytes of writable static space with a gensym name
2619 and return an RTX to refer to its address. */
2620
2621 rtx
2622 assemble_static_space (unsigned HOST_WIDE_INT size)
2623 {
2624 char name[17];
2625 const char *namestring;
2626 rtx x;
2627
2628 ASM_GENERATE_INTERNAL_LABEL (name, "LF", const_labelno);
2629 ++const_labelno;
2630 namestring = ggc_strdup (name);
2631
2632 x = gen_rtx_SYMBOL_REF (Pmode, namestring);
2633 SYMBOL_REF_FLAGS (x) = SYMBOL_FLAG_LOCAL;
2634
2635 #ifdef ASM_OUTPUT_ALIGNED_DECL_LOCAL
2636 ASM_OUTPUT_ALIGNED_DECL_LOCAL (asm_out_file, NULL_TREE, name, size,
2637 BIGGEST_ALIGNMENT);
2638 #else
2639 #ifdef ASM_OUTPUT_ALIGNED_LOCAL
2640 ASM_OUTPUT_ALIGNED_LOCAL (asm_out_file, name, size, BIGGEST_ALIGNMENT);
2641 #else
2642 {
2643 /* Round size up to multiple of BIGGEST_ALIGNMENT bits
2644 so that each uninitialized object starts on such a boundary. */
2645 /* Variable `rounded' might or might not be used in ASM_OUTPUT_LOCAL. */
2646 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED
2647 = ((size + (BIGGEST_ALIGNMENT / BITS_PER_UNIT) - 1)
2648 / (BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2649 * (BIGGEST_ALIGNMENT / BITS_PER_UNIT));
2650 ASM_OUTPUT_LOCAL (asm_out_file, name, size, rounded);
2651 }
2652 #endif
2653 #endif
2654 return x;
2655 }
2656
2657 /* Assemble the static constant template for function entry trampolines.
2658 This is done at most once per compilation.
2659 Returns an RTX for the address of the template. */
2660
2661 static GTY(()) rtx initial_trampoline;
2662
2663 rtx
2664 assemble_trampoline_template (void)
2665 {
2666 char label[256];
2667 const char *name;
2668 int align;
2669 rtx symbol;
2670
2671 gcc_assert (targetm.asm_out.trampoline_template != NULL);
2672
2673 if (initial_trampoline)
2674 return initial_trampoline;
2675
2676 /* By default, put trampoline templates in read-only data section. */
2677
2678 #ifdef TRAMPOLINE_SECTION
2679 switch_to_section (TRAMPOLINE_SECTION);
2680 #else
2681 switch_to_section (readonly_data_section);
2682 #endif
2683
2684 /* Write the assembler code to define one. */
2685 align = floor_log2 (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
2686 if (align > 0)
2687 ASM_OUTPUT_ALIGN (asm_out_file, align);
2688
2689 targetm.asm_out.internal_label (asm_out_file, "LTRAMP", 0);
2690 targetm.asm_out.trampoline_template (asm_out_file);
2691
2692 /* Record the rtl to refer to it. */
2693 ASM_GENERATE_INTERNAL_LABEL (label, "LTRAMP", 0);
2694 name = ggc_strdup (label);
2695 symbol = gen_rtx_SYMBOL_REF (Pmode, name);
2696 SYMBOL_REF_FLAGS (symbol) = SYMBOL_FLAG_LOCAL;
2697
2698 initial_trampoline = gen_const_mem (BLKmode, symbol);
2699 set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
2700 set_mem_size (initial_trampoline, TRAMPOLINE_SIZE);
2701
2702 return initial_trampoline;
2703 }
2704 \f
2705 /* A and B are either alignments or offsets. Return the minimum alignment
2706 that may be assumed after adding the two together. */
2707
2708 static inline unsigned
2709 min_align (unsigned int a, unsigned int b)
2710 {
2711 return least_bit_hwi (a | b);
2712 }
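/* For example, min_align (16, 24) == least_bit_hwi (16 | 24) == 8:
   adding a 24-byte offset (a multiple of 8 but not of 16) to a
   16-byte-aligned address leaves only 8-byte alignment guaranteed.  */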
2713
2714 /* Return the assembler directive for creating a given kind of integer
2715 object. SIZE is the number of bytes in the object and ALIGNED_P
2716 indicates whether it is known to be aligned. Return NULL if the
2717 assembly dialect has no such directive.
2718
2719 The returned string should be printed at the start of a new line and
2720 be followed immediately by the object's initial value. */
2721
2722 const char *
2723 integer_asm_op (int size, int aligned_p)
2724 {
2725 struct asm_int_op *ops;
2726
2727 if (aligned_p)
2728 ops = &targetm.asm_out.aligned_op;
2729 else
2730 ops = &targetm.asm_out.unaligned_op;
2731
2732 switch (size)
2733 {
2734 case 1:
2735 return targetm.asm_out.byte_op;
2736 case 2:
2737 return ops->hi;
2738 case 3:
2739 return ops->psi;
2740 case 4:
2741 return ops->si;
2742 case 5:
2743 case 6:
2744 case 7:
2745 return ops->pdi;
2746 case 8:
2747 return ops->di;
2748 case 9:
2749 case 10:
2750 case 11:
2751 case 12:
2752 case 13:
2753 case 14:
2754 case 15:
2755 return ops->pti;
2756 case 16:
2757 return ops->ti;
2758 default:
2759 return NULL;
2760 }
2761 }
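/* For instance, on a typical ELF target the aligned 4-byte directive
   returned above might be "\t.long\t", so assemble_integer_with_op would
   emit a line such as "\t.long\t42"; the exact strings are entirely
   target-defined via the targetm.asm_out hooks.  */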
2762
2763 /* Use directive OP to assemble an integer object X. Print OP at the
2764 start of the line, followed immediately by the value of X. */
2765
2766 void
2767 assemble_integer_with_op (const char *op, rtx x)
2768 {
2769 fputs (op, asm_out_file);
2770 output_addr_const (asm_out_file, x);
2771 fputc ('\n', asm_out_file);
2772 }
2773
2774 /* The default implementation of the asm_out.integer target hook. */
2775
2776 bool
2777 default_assemble_integer (rtx x ATTRIBUTE_UNUSED,
2778 unsigned int size ATTRIBUTE_UNUSED,
2779 int aligned_p ATTRIBUTE_UNUSED)
2780 {
2781 const char *op = integer_asm_op (size, aligned_p);
2782 /* Avoid GAS bugs for large values. Specifically negative values whose
2783 absolute value fits in a bfd_vma, but not in a bfd_signed_vma. */
2784 if (size > UNITS_PER_WORD && size > POINTER_SIZE_UNITS)
2785 return false;
2786 return op && (assemble_integer_with_op (op, x), true);
2787 }
2788
2789 /* Assemble the integer constant X into an object of SIZE bytes. ALIGN is
2790 the alignment of the integer in bits. Return true if we were able to
2791 output the constant, otherwise false. If FORCE is nonzero, we must be
2792 able to output the constant. */
2793
2794 bool
2795 assemble_integer (rtx x, unsigned int size, unsigned int align, int force)
2796 {
2797 int aligned_p;
2798
2799 aligned_p = (align >= MIN (size * BITS_PER_UNIT, BIGGEST_ALIGNMENT));
2800
2801 /* See if the target hook can handle this kind of object. */
2802 if (targetm.asm_out.integer (x, size, aligned_p))
2803 return true;
2804
2805 /* If the object is a multi-byte one, try splitting it up. Split
2806 it into words if it is multi-word, otherwise split it into bytes. */
2807 if (size > 1)
2808 {
2809 machine_mode omode, imode;
2810 unsigned int subalign;
2811 unsigned int subsize, i;
2812 enum mode_class mclass;
2813
2814 subsize = size > UNITS_PER_WORD ? UNITS_PER_WORD : 1;
2815 subalign = MIN (align, subsize * BITS_PER_UNIT);
2816 if (GET_CODE (x) == CONST_FIXED)
2817 mclass = GET_MODE_CLASS (GET_MODE (x));
2818 else
2819 mclass = MODE_INT;
2820
2821 omode = mode_for_size (subsize * BITS_PER_UNIT, mclass, 0).require ();
2822 imode = mode_for_size (size * BITS_PER_UNIT, mclass, 0).require ();
2823
2824 for (i = 0; i < size; i += subsize)
2825 {
2826 rtx partial = simplify_subreg (omode, x, imode, i);
2827 if (!partial || !assemble_integer (partial, subsize, subalign, 0))
2828 break;
2829 }
2830 if (i == size)
2831 return true;
2832
2833 /* If we've printed some of it, but not all of it, there's no going
2834 back now. */
2835 gcc_assert (!i);
2836 }
2837
2838 gcc_assert (!force);
2839
2840 return false;
2841 }
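/* For instance, on a 32-bit target (UNITS_PER_WORD == 4) with no 8-byte
   integer directive, the loop above splits an 8-byte constant into two
   4-byte pieces via simplify_subreg, each emitted with at most 32-bit
   alignment.  */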
2842 \f
2843 /* Assemble the floating-point constant D into an object of size MODE. ALIGN
2844 is the alignment of the constant in bits. If REVERSE is true, D is output
2845 in reverse storage order. */
2846
2847 void
2848 assemble_real (REAL_VALUE_TYPE d, scalar_float_mode mode, unsigned int align,
2849 bool reverse)
2850 {
2851 long data[4] = {0, 0, 0, 0};
2852 int bitsize, nelts, nunits, units_per;
2853 rtx elt;
2854
2855 /* This is hairy. We have a quantity of known size. real_to_target
2856 will put it into an array of *host* longs, 32 bits per element
2857 (even if long is more than 32 bits). We need to determine the
2858 number of array elements that are occupied (nelts) and the number
2859 of *target* min-addressable units that will be occupied in the
2860 object file (nunits). We cannot assume that 32 divides the
2861 mode's bitsize (size * BITS_PER_UNIT) evenly.
2862
2863 size * BITS_PER_UNIT is used here to make sure that padding bits
2864 (which might appear at either end of the value; real_to_target
2865 will include the padding bits in its output array) are included. */
2866
2867 nunits = GET_MODE_SIZE (mode);
2868 bitsize = nunits * BITS_PER_UNIT;
2869 nelts = CEIL (bitsize, 32);
2870 units_per = 32 / BITS_PER_UNIT;
2871
2872 real_to_target (data, &d, mode);
2873
2874 /* Put out the first word with the specified alignment. */
2875 if (reverse)
2876 elt = flip_storage_order (SImode, gen_int_mode (data[nelts - 1], SImode));
2877 else
2878 elt = GEN_INT (data[0]);
2879 assemble_integer (elt, MIN (nunits, units_per), align, 1);
2880 nunits -= units_per;
2881
2882 /* Subsequent words need only 32-bit alignment. */
2883 align = min_align (align, 32);
2884
2885 for (int i = 1; i < nelts; i++)
2886 {
2887 if (reverse)
2888 elt = flip_storage_order (SImode,
2889 gen_int_mode (data[nelts - 1 - i], SImode));
2890 else
2891 elt = GEN_INT (data[i]);
2892 assemble_integer (elt, MIN (nunits, units_per), align, 1);
2893 nunits -= units_per;
2894 }
2895 }
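/* A worked example of the bookkeeping above, assuming 8-bit target units:
   for a 64-bit double, nunits is 8, bitsize is 64, nelts is 2 and
   units_per is 4, so the value is emitted as two 4-byte pieces, the first
   with the requested alignment and the second with at most 32-bit
   alignment.  */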
2896 \f
2897 /* Given an expression EXP with a constant value,
2898 reduce it to the sum of an assembler symbol and an integer.
2899 Store them both in the structure *VALUE.
2900 EXP must be reducible. */
2901
2902 struct addr_const {
2903 rtx base;
2904 poly_int64 offset;
2905 };
2906
2907 static void
2908 decode_addr_const (tree exp, struct addr_const *value)
2909 {
2910 tree target = TREE_OPERAND (exp, 0);
2911 poly_int64 offset = 0;
2912 rtx x;
2913
2914 while (1)
2915 {
2916 poly_int64 bytepos;
2917 if (TREE_CODE (target) == COMPONENT_REF
2918 && poly_int_tree_p (byte_position (TREE_OPERAND (target, 1)),
2919 &bytepos))
2920 {
2921 offset += bytepos;
2922 target = TREE_OPERAND (target, 0);
2923 }
2924 else if (TREE_CODE (target) == ARRAY_REF
2925 || TREE_CODE (target) == ARRAY_RANGE_REF)
2926 {
2927 /* Truncate big offset. */
2928 offset
2929 += (TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (target)))
2930 * wi::to_poly_widest (TREE_OPERAND (target, 1)).force_shwi ());
2931 target = TREE_OPERAND (target, 0);
2932 }
2933 else if (TREE_CODE (target) == MEM_REF
2934 && TREE_CODE (TREE_OPERAND (target, 0)) == ADDR_EXPR)
2935 {
2936 offset += mem_ref_offset (target).force_shwi ();
2937 target = TREE_OPERAND (TREE_OPERAND (target, 0), 0);
2938 }
2939 else if (TREE_CODE (target) == INDIRECT_REF
2940 && TREE_CODE (TREE_OPERAND (target, 0)) == NOP_EXPR
2941 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (target, 0), 0))
2942 == ADDR_EXPR)
2943 target = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (target, 0), 0), 0);
2944 else
2945 break;
2946 }
2947
2948 switch (TREE_CODE (target))
2949 {
2950 case VAR_DECL:
2951 case FUNCTION_DECL:
2952 x = DECL_RTL (target);
2953 break;
2954
2955 case LABEL_DECL:
2956 x = gen_rtx_MEM (FUNCTION_MODE,
2957 gen_rtx_LABEL_REF (Pmode, force_label_rtx (target)));
2958 break;
2959
2960 case REAL_CST:
2961 case FIXED_CST:
2962 case STRING_CST:
2963 case COMPLEX_CST:
2964 case CONSTRUCTOR:
2965 case INTEGER_CST:
2966 x = lookup_constant_def (target);
2967 /* Should have been added by output_addressed_constants. */
2968 gcc_assert (x);
2969 break;
2970
2971 case INDIRECT_REF:
2972 /* This deals with absolute addresses. */
2973 offset += tree_to_shwi (TREE_OPERAND (target, 0));
2974 x = gen_rtx_MEM (QImode,
2975 gen_rtx_SYMBOL_REF (Pmode, "origin of addresses"));
2976 break;
2977
2978 case COMPOUND_LITERAL_EXPR:
2979 gcc_assert (COMPOUND_LITERAL_EXPR_DECL (target));
2980 x = DECL_RTL (COMPOUND_LITERAL_EXPR_DECL (target));
2981 break;
2982
2983 default:
2984 gcc_unreachable ();
2985 }
2986
2987 gcc_assert (MEM_P (x));
2988 x = XEXP (x, 0);
2989
2990 value->base = x;
2991 value->offset = offset;
2992 }
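/* A worked example of the walk above: for an ADDR_EXPR such as &arr[3],
   where arr is a file-scope array of 4-byte integers, the ARRAY_REF case
   adds 3 * 4 == 12 to the offset, TARGET becomes the VAR_DECL for arr,
   and VALUE ends up as arr's SYMBOL_REF plus an offset of 12.  */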
2993 \f
2994 static GTY(()) hash_table<tree_descriptor_hasher> *const_desc_htab;
2995
2996 static void maybe_output_constant_def_contents (struct constant_descriptor_tree *, int);
2997
2998 /* Constant pool accessor function. */
2999
3000 hash_table<tree_descriptor_hasher> *
3001 constant_pool_htab (void)
3002 {
3003 return const_desc_htab;
3004 }
3005
3006 /* Compute a hash code for a constant expression. */
3007
3008 hashval_t
3009 tree_descriptor_hasher::hash (constant_descriptor_tree *ptr)
3010 {
3011 return ptr->hash;
3012 }
3013
3014 static hashval_t
3015 const_hash_1 (const tree exp)
3016 {
3017 const char *p;
3018 hashval_t hi;
3019 int len, i;
3020 enum tree_code code = TREE_CODE (exp);
3021
3022 /* Either set P and LEN to the address and len of something to hash and
3023 exit the switch or return a value. */
3024
3025 switch (code)
3026 {
3027 case INTEGER_CST:
3028 p = (char *) &TREE_INT_CST_ELT (exp, 0);
3029 len = TREE_INT_CST_NUNITS (exp) * sizeof (HOST_WIDE_INT);
3030 break;
3031
3032 case REAL_CST:
3033 return real_hash (TREE_REAL_CST_PTR (exp));
3034
3035 case FIXED_CST:
3036 return fixed_hash (TREE_FIXED_CST_PTR (exp));
3037
3038 case STRING_CST:
3039 p = TREE_STRING_POINTER (exp);
3040 len = TREE_STRING_LENGTH (exp);
3041 break;
3042
3043 case COMPLEX_CST:
3044 return (const_hash_1 (TREE_REALPART (exp)) * 5
3045 + const_hash_1 (TREE_IMAGPART (exp)));
3046
3047 case VECTOR_CST:
3048 {
3049 hi = 7 + VECTOR_CST_NPATTERNS (exp);
3050 hi = hi * 563 + VECTOR_CST_NELTS_PER_PATTERN (exp);
3051 unsigned int count = vector_cst_encoded_nelts (exp);
3052 for (unsigned int i = 0; i < count; ++i)
3053 hi = hi * 563 + const_hash_1 (VECTOR_CST_ENCODED_ELT (exp, i));
3054 return hi;
3055 }
3056
3057 case CONSTRUCTOR:
3058 {
3059 unsigned HOST_WIDE_INT idx;
3060 tree value;
3061
3062 hi = 5 + int_size_in_bytes (TREE_TYPE (exp));
3063
3064 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
3065 if (value)
3066 hi = hi * 603 + const_hash_1 (value);
3067
3068 return hi;
3069 }
3070
3071 case ADDR_EXPR:
3072 if (CONSTANT_CLASS_P (TREE_OPERAND (exp, 0)))
3073 return const_hash_1 (TREE_OPERAND (exp, 0));
3074
3075 /* Fallthru. */
3076 case FDESC_EXPR:
3077 {
3078 struct addr_const value;
3079
3080 decode_addr_const (exp, &value);
3081 switch (GET_CODE (value.base))
3082 {
3083 case SYMBOL_REF:
3084 /* Don't hash the address of the SYMBOL_REF;
3085 only use the offset and the symbol name. */
3086 hi = value.offset.coeffs[0];
3087 p = XSTR (value.base, 0);
3088 for (i = 0; p[i] != 0; i++)
3089 hi = ((hi * 613) + (unsigned) (p[i]));
3090 break;
3091
3092 case LABEL_REF:
3093 hi = (value.offset.coeffs[0]
3094 + CODE_LABEL_NUMBER (label_ref_label (value.base)) * 13);
3095 break;
3096
3097 default:
3098 gcc_unreachable ();
3099 }
3100 }
3101 return hi;
3102
3103 case PLUS_EXPR:
3104 case POINTER_PLUS_EXPR:
3105 case MINUS_EXPR:
3106 return (const_hash_1 (TREE_OPERAND (exp, 0)) * 9
3107 + const_hash_1 (TREE_OPERAND (exp, 1)));
3108
3109 CASE_CONVERT:
3110 return const_hash_1 (TREE_OPERAND (exp, 0)) * 7 + 2;
3111
3112 default:
3113 /* A language specific constant. Just hash the code. */
3114 return code;
3115 }
3116
3117 /* Compute the hash value. */
3118 hi = len;
3119 for (i = 0; i < len; i++)
3120 hi = ((hi * 613) + (unsigned) (p[i]));
3121
3122 return hi;
3123 }
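/* Illustrative sketch only (standard C, not a replacement for
   const_hash_1): the byte-wise accumulation used above, seeded with the
   length and folded with the 613 multiplier.  */
#if 0
static hashval_t
example_bytes_hash (const char *p, int len)
{
  hashval_t hi = len;
  for (int i = 0; i < len; i++)
    hi = hi * 613 + (unsigned) p[i];
  return hi;
}
#endif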
3124
3125 /* Wrapper of compare_constant, for the htab interface. */
3126 bool
3127 tree_descriptor_hasher::equal (constant_descriptor_tree *c1,
3128 constant_descriptor_tree *c2)
3129 {
3130 if (c1->hash != c2->hash)
3131 return 0;
3132 return compare_constant (c1->value, c2->value);
3133 }
3134
3135 /* Compare t1 and t2, and return 1 only if they are known to result in
3136 the same bit pattern on output. */
3137
3138 static int
3139 compare_constant (const tree t1, const tree t2)
3140 {
3141 enum tree_code typecode;
3142
3143 if (t1 == NULL_TREE)
3144 return t2 == NULL_TREE;
3145 if (t2 == NULL_TREE)
3146 return 0;
3147
3148 if (TREE_CODE (t1) != TREE_CODE (t2))
3149 return 0;
3150
3151 switch (TREE_CODE (t1))
3152 {
3153 case INTEGER_CST:
3154 /* Integer constants are the same only if their types have the same width. */
3155 if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
3156 return 0;
3157 if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2)))
3158 return 0;
3159 return tree_int_cst_equal (t1, t2);
3160
3161 case REAL_CST:
3162 /* Real constants are the same only if their types have the same width. In
3163 addition to the same width, we need to check whether the modes are the
3164 same. There might be two floating point modes that are the same size
3165 but have different representations, such as the PowerPC that has 2
3166 different 128-bit floating point types (IBM extended double and IEEE
3167 128-bit floating point). */
3168 if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
3169 return 0;
3170 if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2)))
3171 return 0;
3172 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
3173
3174 case FIXED_CST:
3175 /* Fixed constants are the same only if their types have the same width. */
3176 if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
3177 return 0;
3178
3179 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
3180
3181 case STRING_CST:
3182 if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2))
3183 || int_size_in_bytes (TREE_TYPE (t1))
3184 != int_size_in_bytes (TREE_TYPE (t2)))
3185 return 0;
3186
3187 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
3188 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
3189 TREE_STRING_LENGTH (t1)));
3190
3191 case COMPLEX_CST:
3192 return (compare_constant (TREE_REALPART (t1), TREE_REALPART (t2))
3193 && compare_constant (TREE_IMAGPART (t1), TREE_IMAGPART (t2)));
3194
3195 case VECTOR_CST:
3196 {
3197 if (VECTOR_CST_NPATTERNS (t1)
3198 != VECTOR_CST_NPATTERNS (t2))
3199 return 0;
3200
3201 if (VECTOR_CST_NELTS_PER_PATTERN (t1)
3202 != VECTOR_CST_NELTS_PER_PATTERN (t2))
3203 return 0;
3204
3205 unsigned int count = vector_cst_encoded_nelts (t1);
3206 for (unsigned int i = 0; i < count; ++i)
3207 if (!compare_constant (VECTOR_CST_ENCODED_ELT (t1, i),
3208 VECTOR_CST_ENCODED_ELT (t2, i)))
3209 return 0;
3210
3211 return 1;
3212 }
3213
3214 case CONSTRUCTOR:
3215 {
3216 vec<constructor_elt, va_gc> *v1, *v2;
3217 unsigned HOST_WIDE_INT idx;
3218
3219 typecode = TREE_CODE (TREE_TYPE (t1));
3220 if (typecode != TREE_CODE (TREE_TYPE (t2)))
3221 return 0;
3222
3223 if (typecode == ARRAY_TYPE)
3224 {
3225 HOST_WIDE_INT size_1 = int_size_in_bytes (TREE_TYPE (t1));
3226 /* For arrays, check that mode, size and storage order match. */
3227 if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2))
3228 || size_1 == -1
3229 || size_1 != int_size_in_bytes (TREE_TYPE (t2))
3230 || TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (t1))
3231 != TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (t2)))
3232 return 0;
3233 }
3234 else
3235 {
3236 /* For record and union constructors, require exact type
3237 equality. */
3238 if (TREE_TYPE (t1) != TREE_TYPE (t2))
3239 return 0;
3240 }
3241
3242 v1 = CONSTRUCTOR_ELTS (t1);
3243 v2 = CONSTRUCTOR_ELTS (t2);
3244 if (vec_safe_length (v1) != vec_safe_length (v2))
3245 return 0;
3246
3247 for (idx = 0; idx < vec_safe_length (v1); ++idx)
3248 {
3249 constructor_elt *c1 = &(*v1)[idx];
3250 constructor_elt *c2 = &(*v2)[idx];
3251
3252 /* Check that each value is the same... */
3253 if (!compare_constant (c1->value, c2->value))
3254 return 0;
3255 /* ... and that they apply to the same fields! */
3256 if (typecode == ARRAY_TYPE)
3257 {
3258 if (!compare_constant (c1->index, c2->index))
3259 return 0;
3260 }
3261 else
3262 {
3263 if (c1->index != c2->index)
3264 return 0;
3265 }
3266 }
3267
3268 return 1;
3269 }
3270
3271 case ADDR_EXPR:
3272 case FDESC_EXPR:
3273 {
3274 struct addr_const value1, value2;
3275 enum rtx_code code;
3276 int ret;
3277
3278 decode_addr_const (t1, &value1);
3279 decode_addr_const (t2, &value2);
3280
3281 if (maybe_ne (value1.offset, value2.offset))
3282 return 0;
3283
3284 code = GET_CODE (value1.base);
3285 if (code != GET_CODE (value2.base))
3286 return 0;
3287
3288 switch (code)
3289 {
3290 case SYMBOL_REF:
3291 ret = (strcmp (XSTR (value1.base, 0), XSTR (value2.base, 0)) == 0);
3292 break;
3293
3294 case LABEL_REF:
3295 ret = (CODE_LABEL_NUMBER (label_ref_label (value1.base))
3296 == CODE_LABEL_NUMBER (label_ref_label (value2.base)));
3297 break;
3298
3299 default:
3300 gcc_unreachable ();
3301 }
3302 return ret;
3303 }
3304
3305 case PLUS_EXPR:
3306 case POINTER_PLUS_EXPR:
3307 case MINUS_EXPR:
3308 case RANGE_EXPR:
3309 return (compare_constant (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0))
3310 && compare_constant (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1)));
3311
3312 CASE_CONVERT:
3313 case VIEW_CONVERT_EXPR:
3314 return compare_constant (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
3315
3316 default:
3317 return 0;
3318 }
3319
3320 gcc_unreachable ();
3321 }
3322 \f
3323 /* Return the section into which constant EXP should be placed. */
3324
3325 static section *
3326 get_constant_section (tree exp, unsigned int align)
3327 {
3328 return targetm.asm_out.select_section (exp,
3329 compute_reloc_for_constant (exp),
3330 align);
3331 }
3332
3333 /* Return the size of constant EXP in bytes. */
3334
3335 static HOST_WIDE_INT
3336 get_constant_size (tree exp)
3337 {
3338 HOST_WIDE_INT size;
3339
3340 size = int_size_in_bytes (TREE_TYPE (exp));
3341 gcc_checking_assert (size >= 0);
3342 gcc_checking_assert (TREE_CODE (exp) != STRING_CST
3343 || size >= TREE_STRING_LENGTH (exp));
3344 return size;
3345 }
3346
3347 /* Subroutine of output_constant_def:
3348 No constant equal to EXP is known to have been output.
3349 Make a constant descriptor to enter EXP in the hash table.
3350 Assign the label number and construct RTL to refer to the
3351 constant's location in memory.
3352 Caller is responsible for updating the hash table. */
3353
3354 static struct constant_descriptor_tree *
3355 build_constant_desc (tree exp)
3356 {
3357 struct constant_descriptor_tree *desc;
3358 rtx symbol, rtl;
3359 char label[256];
3360 int labelno;
3361 tree decl;
3362
3363 desc = ggc_alloc<constant_descriptor_tree> ();
3364 desc->value = exp;
3365
3366 /* Create a string containing the label name, in LABEL. */
3367 labelno = const_labelno++;
3368 ASM_GENERATE_INTERNAL_LABEL (label, "LC", labelno);
3369
3370 /* Construct the VAR_DECL associated with the constant. */
3371 decl = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (label),
3372 TREE_TYPE (exp));
3373 DECL_ARTIFICIAL (decl) = 1;
3374 DECL_IGNORED_P (decl) = 1;
3375 TREE_READONLY (decl) = 1;
3376 TREE_STATIC (decl) = 1;
3377 TREE_ADDRESSABLE (decl) = 1;
3378 /* We don't set the RTL yet as this would cause varpool to assume that the
3379 variable is referenced. Moreover, it would just be dropped in LTO mode.
3380 Instead we set the flag that will be recognized in make_decl_rtl. */
3381 DECL_IN_CONSTANT_POOL (decl) = 1;
3382 DECL_INITIAL (decl) = desc->value;
3383 /* ??? targetm.constant_alignment hasn't been updated for vector types on
3384 most architectures so use DATA_ALIGNMENT as well, except for strings. */
3385 if (TREE_CODE (exp) == STRING_CST)
3386 SET_DECL_ALIGN (decl, targetm.constant_alignment (exp, DECL_ALIGN (decl)));
3387 else
3388 align_variable (decl, 0);
3389
3390 /* Now construct the SYMBOL_REF and the MEM. */
3391 if (use_object_blocks_p ())
3392 {
3393 int align = (TREE_CODE (decl) == CONST_DECL
3394 || (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
3395 ? DECL_ALIGN (decl)
3396 : symtab_node::get (decl)->definition_alignment ());
3397 section *sect = get_constant_section (exp, align);
3398 symbol = create_block_symbol (ggc_strdup (label),
3399 get_block_for_section (sect), -1);
3400 }
3401 else
3402 symbol = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (label));
3403 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LOCAL;
3404 SET_SYMBOL_REF_DECL (symbol, decl);
3405 TREE_CONSTANT_POOL_ADDRESS_P (symbol) = 1;
3406
3407 rtl = gen_const_mem (TYPE_MODE (TREE_TYPE (exp)), symbol);
3408 set_mem_attributes (rtl, exp, 1);
3409 set_mem_alias_set (rtl, 0);
3410
3411 /* Putting EXP into the literal pool might have imposed a different
3412 alignment which should be visible in the RTX as well. */
3413 set_mem_align (rtl, DECL_ALIGN (decl));
3414
3415 /* We cannot share RTX'es in pool entries.
3416 Mark this piece of RTL as required for unsharing. */
3417 RTX_FLAG (rtl, used) = 1;
3418
3419 /* Set flags or add text to the name to record information, such as
3420 that it is a local symbol. If the name is changed, the macro
3421 ASM_OUTPUT_LABELREF will have to know how to strip this
3422 information. This call might invalidate our local variable
3423 SYMBOL; we can't use it afterward. */
3424 targetm.encode_section_info (exp, rtl, true);
3425
3426 desc->rtl = rtl;
3427
3428 return desc;
3429 }
3430
3431 /* Subroutine of output_constant_def and tree_output_constant_def:
3432 Add a constant to the hash table that tracks which constants
3433 already have labels. */
3434
3435 static constant_descriptor_tree *
3436 add_constant_to_table (tree exp)
3437 {
3438 /* The hash table methods may call output_constant_def for addressed
3439 constants, so handle them first. */
3440 output_addressed_constants (exp);
3441
3442 /* Sanity check to catch recursive insertion. */
3443 static bool inserting;
3444 gcc_assert (!inserting);
3445 inserting = true;
3446
3447 /* Look up EXP in the table of constant descriptors. If we didn't
3448 find it, create a new one. */
3449 struct constant_descriptor_tree key;
3450 key.value = exp;
3451 key.hash = const_hash_1 (exp);
3452 constant_descriptor_tree **loc
3453 = const_desc_htab->find_slot_with_hash (&key, key.hash, INSERT);
3454
3455 inserting = false;
3456
3457 struct constant_descriptor_tree *desc = *loc;
3458 if (!desc)
3459 {
3460 desc = build_constant_desc (exp);
3461 desc->hash = key.hash;
3462 *loc = desc;
3463 }
3464
3465 return desc;
3466 }
3467
3468 /* Return an rtx representing a reference to constant data in memory
3469 for the constant expression EXP.
3470
3471 If assembler code for such a constant has already been output,
3472 return an rtx to refer to it.
3473 Otherwise, output such a constant in memory
3474 and generate an rtx for it.
3475
3476 If DEFER is nonzero, this constant can be deferred and output only
3477 if referenced in the function after all optimizations.
3478
3479 `const_desc_htab' records which constants already have label strings. */
3480
3481 rtx
3482 output_constant_def (tree exp, int defer)
3483 {
3484 struct constant_descriptor_tree *desc = add_constant_to_table (exp);
3485 maybe_output_constant_def_contents (desc, defer);
3486 return desc->rtl;
3487 }
3488
3489 /* Subroutine of output_constant_def: Decide whether or not we need to
3490 output the constant DESC now, and if so, do it. */
3491 static void
3492 maybe_output_constant_def_contents (struct constant_descriptor_tree *desc,
3493 int defer)
3494 {
3495 rtx symbol = XEXP (desc->rtl, 0);
3496 tree exp = desc->value;
3497
3498 if (flag_syntax_only)
3499 return;
3500
3501 if (TREE_ASM_WRITTEN (exp))
3502 /* Already output; don't do it again. */
3503 return;
3504
3505 /* We can always defer constants as long as the context allows
3506 doing so. */
3507 if (defer)
3508 {
3509 /* Increment n_deferred_constants if it exists. It needs to be at
3510 least as large as the number of constants actually referred to
3511 by the function. If it's too small we'll stop looking too early
3512 and fail to emit constants; if it's too large we'll only look
3513 through the entire function when we could have stopped earlier. */
3514 if (cfun)
3515 n_deferred_constants++;
3516 return;
3517 }
3518
3519 output_constant_def_contents (symbol);
3520 }
3521
3522 /* Subroutine of output_constant_def_contents. Output the definition
3523 of constant EXP, which is pointed to by label LABEL. ALIGN is the
3524 constant's alignment in bits. */
3525
3526 static void
3527 assemble_constant_contents (tree exp, const char *label, unsigned int align,
3528 bool merge_strings)
3529 {
3530 HOST_WIDE_INT size;
3531
3532 size = get_constant_size (exp);
3533
3534 /* Do any machine/system dependent processing of the constant. */
3535 targetm.asm_out.declare_constant_name (asm_out_file, label, exp, size);
3536
3537 /* Output the value of EXP. */
3538 output_constant (exp, size, align, false, merge_strings);
3539
3540 targetm.asm_out.decl_end ();
3541 }
3542
3543 /* We must output the constant data referred to by SYMBOL; do so. */
3544
3545 static void
3546 output_constant_def_contents (rtx symbol)
3547 {
3548 tree decl = SYMBOL_REF_DECL (symbol);
3549 tree exp = DECL_INITIAL (decl);
3550 bool asan_protected = false;
3551
3552 /* Make sure any other constants whose addresses appear in EXP
3553 are assigned label numbers. */
3554 output_addressed_constants (exp);
3555
3556 /* We are no longer deferring this constant. */
3557 TREE_ASM_WRITTEN (decl) = TREE_ASM_WRITTEN (exp) = 1;
3558
3559 if ((flag_sanitize & SANITIZE_ADDRESS)
3560 && TREE_CODE (exp) == STRING_CST
3561 && asan_protect_global (exp))
3562 {
3563 asan_protected = true;
3564 SET_DECL_ALIGN (decl, MAX (DECL_ALIGN (decl),
3565 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT));
3566 }
3567
3568 /* If the constant is part of an object block, make sure that the
3569 decl has been positioned within its block, but do not write out
3570 its definition yet. output_object_blocks will do that later. */
3571 if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol) && SYMBOL_REF_BLOCK (symbol))
3572 place_block_symbol (symbol);
3573 else
3574 {
3575 int align = (TREE_CODE (decl) == CONST_DECL
3576 || (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
3577 ? DECL_ALIGN (decl)
3578 : symtab_node::get (decl)->definition_alignment ());
3579 section *sect = get_constant_section (exp, align);
3580 switch_to_section (sect);
3581 if (align > BITS_PER_UNIT)
3582 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
3583 assemble_constant_contents (exp, XSTR (symbol, 0), align,
3584 (sect->common.flags & SECTION_MERGE)
3585 && (sect->common.flags & SECTION_STRINGS));
3586 if (asan_protected)
3587 {
3588 HOST_WIDE_INT size = get_constant_size (exp);
3589 assemble_zeros (asan_red_zone_size (size));
3590 }
3591 }
3592 }
3593
3594 /* Look up EXP in the table of constant descriptors. Return the rtl
3595 if it has been emitted, else null. */
3596
3597 rtx
3598 lookup_constant_def (tree exp)
3599 {
3600 struct constant_descriptor_tree key;
3601
3602 key.value = exp;
3603 key.hash = const_hash_1 (exp);
3604 constant_descriptor_tree *desc
3605 = const_desc_htab->find_with_hash (&key, key.hash);
3606
3607 return (desc ? desc->rtl : NULL_RTX);
3608 }
3609
3610 /* Return a tree representing a reference to constant data in memory
3611 for the constant expression EXP.
3612
3613 This is the counterpart of output_constant_def at the Tree level. */
3614
3615 tree
3616 tree_output_constant_def (tree exp)
3617 {
3618 struct constant_descriptor_tree *desc = add_constant_to_table (exp);
3619 tree decl = SYMBOL_REF_DECL (XEXP (desc->rtl, 0));
3620 varpool_node::finalize_decl (decl);
3621 return decl;
3622 }
3623 \f
3624 struct GTY((chain_next ("%h.next"), for_user)) constant_descriptor_rtx {
3625 struct constant_descriptor_rtx *next;
3626 rtx mem;
3627 rtx sym;
3628 rtx constant;
3629 HOST_WIDE_INT offset;
3630 hashval_t hash;
3631 fixed_size_mode mode;
3632 unsigned int align;
3633 int labelno;
3634 int mark;
3635 };
3636
3637 struct const_rtx_desc_hasher : ggc_ptr_hash<constant_descriptor_rtx>
3638 {
3639 static hashval_t hash (constant_descriptor_rtx *);
3640 static bool equal (constant_descriptor_rtx *, constant_descriptor_rtx *);
3641 };
3642
3643 /* Used in the hash tables to avoid outputting the same constant
3644 twice. Unlike 'struct constant_descriptor_tree', RTX constants
3645 are output once per function, not once per file. */
3646 /* ??? Only a few targets need per-function constant pools. Most
3647 can use one per-file pool. Should add a targetm bit to tell the
3648 difference. */
3649
3650 struct GTY(()) rtx_constant_pool {
3651 /* Pointers to first and last constant in pool, as ordered by offset. */
3652 struct constant_descriptor_rtx *first;
3653 struct constant_descriptor_rtx *last;
3654
3655 /* Hash facility for making memory-constants from constant rtl-expressions.
3656 It is used on RISC machines where immediate integer arguments and
3657 constant addresses are restricted so that such constants must be stored
3658 in memory. */
3659 hash_table<const_rtx_desc_hasher> *const_rtx_htab;
3660
3661 /* Current offset in constant pool (does not include any
3662 machine-specific header). */
3663 HOST_WIDE_INT offset;
3664 };
3665
3666 /* Hash and compare functions for const_rtx_htab. */
3667
3668 hashval_t
3669 const_rtx_desc_hasher::hash (constant_descriptor_rtx *desc)
3670 {
3671 return desc->hash;
3672 }
3673
3674 bool
3675 const_rtx_desc_hasher::equal (constant_descriptor_rtx *x,
3676 constant_descriptor_rtx *y)
3677 {
3678 if (x->mode != y->mode)
3679 return 0;
3680 return rtx_equal_p (x->constant, y->constant);
3681 }
3682
3683 /* Hash one component of a constant. */
3684
3685 static hashval_t
3686 const_rtx_hash_1 (const_rtx x)
3687 {
3688 unsigned HOST_WIDE_INT hwi;
3689 machine_mode mode;
3690 enum rtx_code code;
3691 hashval_t h;
3692 int i;
3693
3694 code = GET_CODE (x);
3695 mode = GET_MODE (x);
3696 h = (hashval_t) code * 1048573 + mode;
3697
3698 switch (code)
3699 {
3700 case CONST_INT:
3701 hwi = INTVAL (x);
3702
3703 fold_hwi:
3704 {
3705 int shift = sizeof (hashval_t) * CHAR_BIT;
3706 const int n = sizeof (HOST_WIDE_INT) / sizeof (hashval_t);
3707
3708 h ^= (hashval_t) hwi;
3709 for (i = 1; i < n; ++i)
3710 {
3711 hwi >>= shift;
3712 h ^= (hashval_t) hwi;
3713 }
3714 }
3715 break;
3716
3717 case CONST_WIDE_INT:
3718 hwi = 0;
3719 {
3720 for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
3721 hwi ^= CONST_WIDE_INT_ELT (x, i);
3722 goto fold_hwi;
3723 }
3724
3725 case CONST_DOUBLE:
3726 if (TARGET_SUPPORTS_WIDE_INT == 0 && mode == VOIDmode)
3727 {
3728 hwi = CONST_DOUBLE_LOW (x) ^ CONST_DOUBLE_HIGH (x);
3729 goto fold_hwi;
3730 }
3731 else
3732 h ^= real_hash (CONST_DOUBLE_REAL_VALUE (x));
3733 break;
3734
3735 case CONST_FIXED:
3736 h ^= fixed_hash (CONST_FIXED_VALUE (x));
3737 break;
3738
3739 case SYMBOL_REF:
3740 h ^= htab_hash_string (XSTR (x, 0));
3741 break;
3742
3743 case LABEL_REF:
3744 h = h * 251 + CODE_LABEL_NUMBER (label_ref_label (x));
3745 break;
3746
3747 case UNSPEC:
3748 case UNSPEC_VOLATILE:
3749 h = h * 251 + XINT (x, 1);
3750 break;
3751
3752 default:
3753 break;
3754 }
3755
3756 return h;
3757 }
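
/* Illustrative sketch (not part of varasm.c): the fold_hwi loop above
   compresses a HOST_WIDE_INT that may be wider than hashval_t by XOR-ing
   successive hashval_t-sized chunks into the running hash.  A standalone
   version using plain C types, assuming a 32-bit hash and a 64-bit value,
   might look like this.  */

static unsigned int
fold_wide_value_into_hash (unsigned int h, unsigned long long v)
{
  const int shift = 32;                                 /* Bits per chunk.  */
  const int nchunks = (int) (sizeof (v) / sizeof (h));  /* Usually 2.  */

  h ^= (unsigned int) v;           /* Low chunk first.  */
  for (int i = 1; i < nchunks; i++)
    {
      v >>= shift;                 /* Expose the next chunk.  */
      h ^= (unsigned int) v;
    }
  return h;
}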
3758
3759 /* Compute a hash value for X, which should be a constant. */
3760
3761 static hashval_t
3762 const_rtx_hash (rtx x)
3763 {
3764 hashval_t h = 0;
3765 subrtx_iterator::array_type array;
3766 FOR_EACH_SUBRTX (iter, array, x, ALL)
3767 h = h * 509 + const_rtx_hash_1 (*iter);
3768 return h;
3769 }
3770
3771 \f
3772 /* Create and return a new rtx constant pool. */
3773
3774 static struct rtx_constant_pool *
3775 create_constant_pool (void)
3776 {
3777 struct rtx_constant_pool *pool;
3778
3779 pool = ggc_alloc<rtx_constant_pool> ();
3780 pool->const_rtx_htab = hash_table<const_rtx_desc_hasher>::create_ggc (31);
3781 pool->first = NULL;
3782 pool->last = NULL;
3783 pool->offset = 0;
3784 return pool;
3785 }
3786
3787 /* Initialize constant pool hashing for a new function. */
3788
3789 void
3790 init_varasm_status (void)
3791 {
3792 crtl->varasm.pool = create_constant_pool ();
3793 crtl->varasm.deferred_constants = 0;
3794 }
3795 \f
3796 /* Given a MINUS expression, simplify it if both sides
3797 include the same symbol. */
3798
3799 rtx
3800 simplify_subtraction (rtx x)
3801 {
3802 rtx r = simplify_rtx (x);
3803 return r ? r : x;
3804 }
3805 \f
3806 /* Given a constant rtx X, make (or find) a memory constant for its value
3807 and return a MEM rtx to refer to it in memory. IN_MODE is the mode
3808 of X. */
3809
3810 rtx
3811 force_const_mem (machine_mode in_mode, rtx x)
3812 {
3813 struct constant_descriptor_rtx *desc, tmp;
3814 struct rtx_constant_pool *pool;
3815 char label[256];
3816 rtx def, symbol;
3817 hashval_t hash;
3818 unsigned int align;
3819 constant_descriptor_rtx **slot;
3820 fixed_size_mode mode;
3821
3822 /* We can't force variable-sized objects to memory. */
3823 if (!is_a <fixed_size_mode> (in_mode, &mode))
3824 return NULL_RTX;
3825
3826 /* If we're not allowed to drop X into the constant pool, don't. */
3827 if (targetm.cannot_force_const_mem (mode, x))
3828 return NULL_RTX;
3829
3830 /* Record that this function has used a constant pool entry. */
3831 crtl->uses_const_pool = 1;
3832
3833 /* Decide which pool to use. */
3834 pool = (targetm.use_blocks_for_constant_p (mode, x)
3835 ? shared_constant_pool
3836 : crtl->varasm.pool);
3837
3838 /* Lookup the value in the hashtable. */
3839 tmp.constant = x;
3840 tmp.mode = mode;
3841 hash = const_rtx_hash (x);
3842 slot = pool->const_rtx_htab->find_slot_with_hash (&tmp, hash, INSERT);
3843 desc = *slot;
3844
3845 /* If the constant was already present, return its memory. */
3846 if (desc)
3847 return copy_rtx (desc->mem);
3848
3849 /* Otherwise, create a new descriptor. */
3850 desc = ggc_alloc<constant_descriptor_rtx> ();
3851 *slot = desc;
3852
3853 /* Align the location counter as required by EXP's data type. */
3854 machine_mode align_mode = (mode == VOIDmode ? word_mode : mode);
3855 align = targetm.static_rtx_alignment (align_mode);
3856
3857 pool->offset += (align / BITS_PER_UNIT) - 1;
3858 pool->offset &= ~ ((align / BITS_PER_UNIT) - 1);
3859
3860 desc->next = NULL;
3861 desc->constant = copy_rtx (tmp.constant);
3862 desc->offset = pool->offset;
3863 desc->hash = hash;
3864 desc->mode = mode;
3865 desc->align = align;
3866 desc->labelno = const_labelno;
3867 desc->mark = 0;
3868
3869 pool->offset += GET_MODE_SIZE (mode);
3870 if (pool->last)
3871 pool->last->next = desc;
3872 else
3873 pool->first = pool->last = desc;
3874 pool->last = desc;
3875
3876 /* Create a string containing the label name, in LABEL. */
3877 ASM_GENERATE_INTERNAL_LABEL (label, "LC", const_labelno);
3878 ++const_labelno;
3879
3880 /* Construct the SYMBOL_REF. Make sure to mark it as belonging to
3881 the constants pool. */
3882 if (use_object_blocks_p () && targetm.use_blocks_for_constant_p (mode, x))
3883 {
3884 section *sect = targetm.asm_out.select_rtx_section (mode, x, align);
3885 symbol = create_block_symbol (ggc_strdup (label),
3886 get_block_for_section (sect), -1);
3887 }
3888 else
3889 symbol = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (label));
3890 desc->sym = symbol;
3891 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LOCAL;
3892 CONSTANT_POOL_ADDRESS_P (symbol) = 1;
3893 SET_SYMBOL_REF_CONSTANT (symbol, desc);
3894
3895 /* Construct the MEM. */
3896 desc->mem = def = gen_const_mem (mode, symbol);
3897 set_mem_align (def, align);
3898
3899 /* If we're dropping a label to the constant pool, make sure we
3900 don't delete it. */
3901 if (GET_CODE (x) == LABEL_REF)
3902 LABEL_PRESERVE_P (XEXP (x, 0)) = 1;
3903
3904 return copy_rtx (def);
3905 }
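
/* Illustrative sketch (not part of varasm.c): force_const_mem rounds the
   pool offset up to the entry's alignment with the classic add-then-mask
   idiom, which requires the alignment in bytes to be a power of two.  A
   standalone version of that rounding:  */

static unsigned long long
align_up (unsigned long long offset, unsigned long long align_bytes)
{
  /* ALIGN_BYTES must be a power of two.
     E.g. align_up (13, 8) == 16 and align_up (16, 8) == 16.  */
  offset += align_bytes - 1;
  offset &= ~(align_bytes - 1);
  return offset;
}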
3906 \f
3907 /* Given a constant pool SYMBOL_REF, return the corresponding constant. */
3908
3909 rtx
3910 get_pool_constant (const_rtx addr)
3911 {
3912 return SYMBOL_REF_CONSTANT (addr)->constant;
3913 }
3914
3915 /* Given a constant pool SYMBOL_REF, return the corresponding constant
3916 and whether it has been output or not. */
3917
3918 rtx
3919 get_pool_constant_mark (rtx addr, bool *pmarked)
3920 {
3921 struct constant_descriptor_rtx *desc;
3922
3923 desc = SYMBOL_REF_CONSTANT (addr);
3924 *pmarked = (desc->mark != 0);
3925 return desc->constant;
3926 }
3927
3928 /* Similar, return the mode. */
3929
3930 fixed_size_mode
3931 get_pool_mode (const_rtx addr)
3932 {
3933 return SYMBOL_REF_CONSTANT (addr)->mode;
3934 }
3935
3936 /* Return TRUE if and only if the constant pool has no entries. Note
3937 that even entries we might end up choosing not to emit are counted
3938 here, so there is the potential for missed optimizations. */
3939
3940 bool
3941 constant_pool_empty_p (void)
3942 {
3943 return crtl->varasm.pool->first == NULL;
3944 }
3945 \f
3946 /* Worker function for output_constant_pool_1. Emit assembly for X
3947 in MODE with known alignment ALIGN. */
3948
3949 static void
3950 output_constant_pool_2 (fixed_size_mode mode, rtx x, unsigned int align)
3951 {
3952 switch (GET_MODE_CLASS (mode))
3953 {
3954 case MODE_FLOAT:
3955 case MODE_DECIMAL_FLOAT:
3956 {
3957 gcc_assert (CONST_DOUBLE_AS_FLOAT_P (x));
3958 assemble_real (*CONST_DOUBLE_REAL_VALUE (x),
3959 as_a <scalar_float_mode> (mode), align, false);
3960 break;
3961 }
3962
3963 case MODE_INT:
3964 case MODE_PARTIAL_INT:
3965 case MODE_FRACT:
3966 case MODE_UFRACT:
3967 case MODE_ACCUM:
3968 case MODE_UACCUM:
3969 assemble_integer (x, GET_MODE_SIZE (mode), align, 1);
3970 break;
3971
3972 case MODE_VECTOR_BOOL:
3973 {
3974 gcc_assert (GET_CODE (x) == CONST_VECTOR);
3975
3976 /* Pick the smallest integer mode that contains at least one
3977 whole element. Often this is byte_mode and contains more
3978 than one element. */
3979 unsigned int nelts = GET_MODE_NUNITS (mode);
3980 unsigned int elt_bits = GET_MODE_BITSIZE (mode) / nelts;
3981 unsigned int int_bits = MAX (elt_bits, BITS_PER_UNIT);
3982 scalar_int_mode int_mode = int_mode_for_size (int_bits, 0).require ();
3983
3984 /* Build the constant up one integer at a time. */
3985 unsigned int elts_per_int = int_bits / elt_bits;
3986 for (unsigned int i = 0; i < nelts; i += elts_per_int)
3987 {
3988 unsigned HOST_WIDE_INT value = 0;
3989 unsigned int limit = MIN (nelts - i, elts_per_int);
3990 for (unsigned int j = 0; j < limit; ++j)
3991 if (INTVAL (CONST_VECTOR_ELT (x, i + j)) != 0)
3992 value |= 1 << (j * elt_bits);
3993 output_constant_pool_2 (int_mode, gen_int_mode (value, int_mode),
3994 i != 0 ? MIN (align, int_bits) : align);
3995 }
3996 break;
3997 }
3998 case MODE_VECTOR_FLOAT:
3999 case MODE_VECTOR_INT:
4000 case MODE_VECTOR_FRACT:
4001 case MODE_VECTOR_UFRACT:
4002 case MODE_VECTOR_ACCUM:
4003 case MODE_VECTOR_UACCUM:
4004 {
4005 int i, units;
4006 scalar_mode submode = GET_MODE_INNER (mode);
4007 unsigned int subalign = MIN (align, GET_MODE_BITSIZE (submode));
4008
4009 gcc_assert (GET_CODE (x) == CONST_VECTOR);
4010 units = GET_MODE_NUNITS (mode);
4011
4012 for (i = 0; i < units; i++)
4013 {
4014 rtx elt = CONST_VECTOR_ELT (x, i);
4015 output_constant_pool_2 (submode, elt, i ? subalign : align);
4016 }
4017 }
4018 break;
4019
4020 default:
4021 gcc_unreachable ();
4022 }
4023 }
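
/* Illustrative sketch (not part of varasm.c): the MODE_VECTOR_BOOL case
   above packs the vector's ELT_BITS-wide boolean lanes into host integers,
   setting bit J * ELT_BITS of a unit when element J is nonzero, then emits
   each unit through a recursive call.  A simplified standalone version with
   one bit per element and byte-sized units:  */

static void
pack_bool_lanes (const int *elts, unsigned int nelts, unsigned char *out)
{
  /* OUT must provide (NELTS + 7) / 8 zero-initialized bytes.  Element J
     occupies bit J % 8 of byte J / 8, mirroring
     "value |= 1 << (j * elt_bits)" above with elt_bits == 1.  */
  for (unsigned int j = 0; j < nelts; j++)
    if (elts[j] != 0)
      out[j / 8] |= (unsigned char) (1u << (j % 8));
}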
4024
4025 /* Worker function for output_constant_pool. Emit constant DESC,
4026 giving it ALIGN bits of alignment. */
4027
4028 static void
4029 output_constant_pool_1 (struct constant_descriptor_rtx *desc,
4030 unsigned int align)
4031 {
4032 rtx x, tmp;
4033
4034 x = desc->constant;
4035
4036 /* See if X is a LABEL_REF (or a CONST referring to a LABEL_REF)
4037 whose CODE_LABEL has been deleted. This can occur if a jump table
4038 is eliminated by optimization. If so, write a constant of zero
4039 instead. Note that this can also happen by turning the
4040 CODE_LABEL into a NOTE. */
4041 /* ??? This seems completely and utterly wrong. Certainly it's
4042 not true for NOTE_INSN_DELETED_LABEL, but I doubt it functions
4043 properly even with rtx_insn::deleted and friends. */
4044
4045 tmp = x;
4046 switch (GET_CODE (tmp))
4047 {
4048 case CONST:
4049 if (GET_CODE (XEXP (tmp, 0)) != PLUS
4050 || GET_CODE (XEXP (XEXP (tmp, 0), 0)) != LABEL_REF)
4051 break;
4052 tmp = XEXP (XEXP (tmp, 0), 0);
4053 /* FALLTHRU */
4054
4055 case LABEL_REF:
4056 {
4057 rtx_insn *insn = label_ref_label (tmp);
4058 gcc_assert (!insn->deleted ());
4059 gcc_assert (!NOTE_P (insn)
4060 || NOTE_KIND (insn) != NOTE_INSN_DELETED);
4061 break;
4062 }
4063
4064 default:
4065 break;
4066 }
4067
4068 #ifdef ASM_OUTPUT_SPECIAL_POOL_ENTRY
4069 ASM_OUTPUT_SPECIAL_POOL_ENTRY (asm_out_file, x, desc->mode,
4070 align, desc->labelno, done);
4071 #endif
4072
4073 assemble_align (align);
4074
4075 /* Output the label. */
4076 targetm.asm_out.internal_label (asm_out_file, "LC", desc->labelno);
4077
4081 /* Output the data.
4082 Pass the actual alignment value while emitting the string constant to the
4083 asm code, as 'output_constant_pool_1' explicitly passes an alignment of 1
4084 on the assumption that the data is already aligned, which prevents the
4085 generation of fix-up table entries. */
4083 output_constant_pool_2 (desc->mode, x, desc->align);
4084
4085 /* Make sure all constants in SECTION_MERGE and not SECTION_STRINGS
4086 sections have proper size. */
4087 if (align > GET_MODE_BITSIZE (desc->mode)
4088 && in_section
4089 && (in_section->common.flags & SECTION_MERGE))
4090 assemble_align (align);
4091
4092 #ifdef ASM_OUTPUT_SPECIAL_POOL_ENTRY
4093 done:
4094 #endif
4095 return;
4096 }
4097
4098 /* Recompute the offsets of entries in POOL, and the overall size of
4099 POOL. Do this after calling mark_constant_pool to ensure that we
4100 are computing the offset values for the pool which we will actually
4101 emit. */
4102
4103 static void
4104 recompute_pool_offsets (struct rtx_constant_pool *pool)
4105 {
4106 struct constant_descriptor_rtx *desc;
4107 pool->offset = 0;
4108
4109 for (desc = pool->first; desc ; desc = desc->next)
4110 if (desc->mark)
4111 {
4112 /* Recalculate offset. */
4113 unsigned int align = desc->align;
4114 pool->offset += (align / BITS_PER_UNIT) - 1;
4115 pool->offset &= ~ ((align / BITS_PER_UNIT) - 1);
4116 desc->offset = pool->offset;
4117 pool->offset += GET_MODE_SIZE (desc->mode);
4118 }
4119 }
4120
4121 /* Mark all constants that are referenced by SYMBOL_REFs in X.
4122 Emit referenced deferred strings. */
4123
4124 static void
4125 mark_constants_in_pattern (rtx insn)
4126 {
4127 subrtx_iterator::array_type array;
4128 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
4129 {
4130 const_rtx x = *iter;
4131 if (GET_CODE (x) == SYMBOL_REF)
4132 {
4133 if (CONSTANT_POOL_ADDRESS_P (x))
4134 {
4135 struct constant_descriptor_rtx *desc = SYMBOL_REF_CONSTANT (x);
4136 if (desc->mark == 0)
4137 {
4138 desc->mark = 1;
4139 iter.substitute (desc->constant);
4140 }
4141 }
4142 else if (TREE_CONSTANT_POOL_ADDRESS_P (x))
4143 {
4144 tree decl = SYMBOL_REF_DECL (x);
4145 if (!TREE_ASM_WRITTEN (DECL_INITIAL (decl)))
4146 {
4147 n_deferred_constants--;
4148 output_constant_def_contents (CONST_CAST_RTX (x));
4149 }
4150 }
4151 }
4152 }
4153 }
4154
4155 /* Look through appropriate parts of INSN, marking all entries in the
4156 constant pool which are actually being used. Entries that are only
4157 referenced by other constants are also marked as used. Emit
4158 deferred strings that are used. */
4159
4160 static void
4161 mark_constants (rtx_insn *insn)
4162 {
4163 if (!INSN_P (insn))
4164 return;
4165
4166 /* Insns may appear inside a SEQUENCE. Only check the patterns of
4167 insns, not any notes that may be attached. We don't want to mark
4168 a constant just because it happens to appear in a REG_EQUIV note. */
4169 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
4170 {
4171 int i, n = seq->len ();
4172 for (i = 0; i < n; ++i)
4173 {
4174 rtx subinsn = seq->element (i);
4175 if (INSN_P (subinsn))
4176 mark_constants_in_pattern (subinsn);
4177 }
4178 }
4179 else
4180 mark_constants_in_pattern (insn);
4181 }
4182
4183 /* Look through the instructions for this function, and mark all the
4184 entries in POOL which are actually being used. Emit deferred constants
4185 which have indeed been used. */
4186
4187 static void
4188 mark_constant_pool (void)
4189 {
4190 rtx_insn *insn;
4191
4192 if (!crtl->uses_const_pool && n_deferred_constants == 0)
4193 return;
4194
4195 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4196 mark_constants (insn);
4197 }
4198
4199 /* Write all the constants in POOL. */
4200
4201 static void
4202 output_constant_pool_contents (struct rtx_constant_pool *pool)
4203 {
4204 struct constant_descriptor_rtx *desc;
4205
4206 for (desc = pool->first; desc ; desc = desc->next)
4207 if (desc->mark)
4208 {
4209 /* If the constant is part of an object_block, make sure that
4210 the constant has been positioned within its block, but do not
4211 write out its definition yet. output_object_blocks will do
4212 that later. */
4213 if (SYMBOL_REF_HAS_BLOCK_INFO_P (desc->sym)
4214 && SYMBOL_REF_BLOCK (desc->sym))
4215 place_block_symbol (desc->sym);
4216 else
4217 {
4218 switch_to_section (targetm.asm_out.select_rtx_section
4219 (desc->mode, desc->constant, desc->align));
4220 output_constant_pool_1 (desc, desc->align);
4221 }
4222 }
4223 }
4224
4225 /* Mark all constants that are used in the current function, then write
4226 out the function's private constant pool. */
4227
4228 static void
4229 output_constant_pool (const char *fnname ATTRIBUTE_UNUSED,
4230 tree fndecl ATTRIBUTE_UNUSED)
4231 {
4232 struct rtx_constant_pool *pool = crtl->varasm.pool;
4233
4234 /* It is possible for gcc to call force_const_mem and then to later
4235 discard the instructions which refer to the constant. In such a
4236 case we do not need to output the constant. */
4237 mark_constant_pool ();
4238
4239 /* Having marked the constant pool entries we'll actually emit, we
4240 now need to rebuild the offset information, which may have become
4241 stale. */
4242 recompute_pool_offsets (pool);
4243
4244 #ifdef ASM_OUTPUT_POOL_PROLOGUE
4245 ASM_OUTPUT_POOL_PROLOGUE (asm_out_file, fnname, fndecl, pool->offset);
4246 #endif
4247
4248 output_constant_pool_contents (pool);
4249
4250 #ifdef ASM_OUTPUT_POOL_EPILOGUE
4251 ASM_OUTPUT_POOL_EPILOGUE (asm_out_file, fnname, fndecl, pool->offset);
4252 #endif
4253 }
4254 \f
4255 /* Write the contents of the shared constant pool. */
4256
4257 void
4258 output_shared_constant_pool (void)
4259 {
4260 output_constant_pool_contents (shared_constant_pool);
4261 }
4262 \f
4263 /* Determine what kind of relocations EXP may need. */
4264
4265 int
4266 compute_reloc_for_constant (tree exp)
4267 {
4268 int reloc = 0, reloc2;
4269 tree tem;
4270
4271 switch (TREE_CODE (exp))
4272 {
4273 case ADDR_EXPR:
4274 case FDESC_EXPR:
4275 /* Go inside any operations that get_inner_reference can handle and see
4276 if what's inside is a constant: no need to do anything here for
4277 addresses of variables or functions. */
4278 for (tem = TREE_OPERAND (exp, 0); handled_component_p (tem);
4279 tem = TREE_OPERAND (tem, 0))
4280 ;
4281
4282 if (TREE_CODE (tem) == MEM_REF
4283 && TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR)
4284 {
4285 reloc = compute_reloc_for_constant (TREE_OPERAND (tem, 0));
4286 break;
4287 }
4288
4289 if (!targetm.binds_local_p (tem))
4290 reloc |= 2;
4291 else
4292 reloc |= 1;
4293 break;
4294
4295 case PLUS_EXPR:
4296 case POINTER_PLUS_EXPR:
4297 reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
4298 reloc |= compute_reloc_for_constant (TREE_OPERAND (exp, 1));
4299 break;
4300
4301 case MINUS_EXPR:
4302 reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
4303 reloc2 = compute_reloc_for_constant (TREE_OPERAND (exp, 1));
4304 /* The difference of two local labels is computable at link time. */
4305 if (reloc == 1 && reloc2 == 1)
4306 reloc = 0;
4307 else
4308 reloc |= reloc2;
4309 break;
4310
4311 CASE_CONVERT:
4312 case VIEW_CONVERT_EXPR:
4313 reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
4314 break;
4315
4316 case CONSTRUCTOR:
4317 {
4318 unsigned HOST_WIDE_INT idx;
4319 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, tem)
4320 if (tem != 0)
4321 reloc |= compute_reloc_for_constant (tem);
4322 }
4323 break;
4324
4325 default:
4326 break;
4327 }
4328 return reloc;
4329 }
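
/* Illustrative sketch (not part of varasm.c): the return value above is a
   small bitmask -- 0 means no relocation is needed, bit 0 (value 1) marks a
   reference that binds locally, bit 1 (value 2) marks one that does not --
   and a difference of two local references folds back to 0 because the
   linker can compute it.  A standalone combiner for a two-operand
   difference:  */

static int
combine_reloc_for_difference (int reloc0, int reloc1)
{
  /* Mirrors the MINUS_EXPR case above: the difference of two link-time
     local addresses needs no relocation; otherwise the requirements of
     both operands are merged.  */
  if (reloc0 == 1 && reloc1 == 1)
    return 0;
  return reloc0 | reloc1;
}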
4330
4331 /* Find all the constants whose addresses are referenced inside of EXP,
4332 and make sure assembler code with a label has been output for each one.
4333 The walk recurses into CONSTRUCTOR elements and nested expressions. */
4334
4335 static void
4336 output_addressed_constants (tree exp)
4337 {
4338 tree tem;
4339
4340 switch (TREE_CODE (exp))
4341 {
4342 case ADDR_EXPR:
4343 case FDESC_EXPR:
4344 /* Go inside any operations that get_inner_reference can handle and see
4345 if what's inside is a constant: no need to do anything here for
4346 addresses of variables or functions. */
4347 for (tem = TREE_OPERAND (exp, 0); handled_component_p (tem);
4348 tem = TREE_OPERAND (tem, 0))
4349 ;
4350
4351 /* If we have an initialized CONST_DECL, retrieve the initializer. */
4352 if (TREE_CODE (tem) == CONST_DECL && DECL_INITIAL (tem))
4353 tem = DECL_INITIAL (tem);
4354
4355 if (CONSTANT_CLASS_P (tem) || TREE_CODE (tem) == CONSTRUCTOR)
4356 output_constant_def (tem, 0);
4357
4358 if (TREE_CODE (tem) == MEM_REF)
4359 output_addressed_constants (TREE_OPERAND (tem, 0));
4360 break;
4361
4362 case PLUS_EXPR:
4363 case POINTER_PLUS_EXPR:
4364 case MINUS_EXPR:
4365 output_addressed_constants (TREE_OPERAND (exp, 1));
4366 gcc_fallthrough ();
4367
4368 CASE_CONVERT:
4369 case VIEW_CONVERT_EXPR:
4370 output_addressed_constants (TREE_OPERAND (exp, 0));
4371 break;
4372
4373 case CONSTRUCTOR:
4374 {
4375 unsigned HOST_WIDE_INT idx;
4376 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, tem)
4377 if (tem != 0)
4378 output_addressed_constants (tem);
4379 }
4380 break;
4381
4382 default:
4383 break;
4384 }
4385 }
4386 \f
4387 /* Whether a constructor CTOR is a valid static constant initializer if all
4388 its elements are. This used to be internal to initializer_constant_valid_p
4389 and has been exposed to let other functions like categorize_ctor_elements
4390 evaluate the property while walking a constructor for other purposes. */
4391
4392 bool
4393 constructor_static_from_elts_p (const_tree ctor)
4394 {
4395 return (TREE_CONSTANT (ctor)
4396 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4397 || TREE_CODE (TREE_TYPE (ctor)) == RECORD_TYPE
4398 || TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE));
4399 }
4400
4401 static tree initializer_constant_valid_p_1 (tree value, tree endtype,
4402 tree *cache);
4403
4404 /* A subroutine of initializer_constant_valid_p. VALUE is a MINUS_EXPR,
4405 PLUS_EXPR or POINTER_PLUS_EXPR. This looks for cases of VALUE
4406 which are valid when ENDTYPE is an integer of any size; in
4407 particular, this does not accept a pointer minus a constant. This
4408 returns null_pointer_node if the VALUE is an absolute constant
4409 which can be used to initialize a static variable. Otherwise it
4410 returns NULL. */
4411
4412 static tree
4413 narrowing_initializer_constant_valid_p (tree value, tree endtype, tree *cache)
4414 {
4415 tree op0, op1;
4416
4417 if (!INTEGRAL_TYPE_P (endtype))
4418 return NULL_TREE;
4419
4420 op0 = TREE_OPERAND (value, 0);
4421 op1 = TREE_OPERAND (value, 1);
4422
4423 /* Like STRIP_NOPS except allow the operand mode to widen. This
4424 works around a feature of fold that simplifies (int)(p1 - p2) to
4425 ((int)p1 - (int)p2) under the theory that the narrower operation
4426 is cheaper. */
4427
4428 while (CONVERT_EXPR_P (op0)
4429 || TREE_CODE (op0) == NON_LVALUE_EXPR)
4430 {
4431 tree inner = TREE_OPERAND (op0, 0);
4432 if (inner == error_mark_node
4433 || ! INTEGRAL_MODE_P (TYPE_MODE (TREE_TYPE (inner)))
4434 || (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (op0)))
4435 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (inner)))))
4436 break;
4437 op0 = inner;
4438 }
4439
4440 while (CONVERT_EXPR_P (op1)
4441 || TREE_CODE (op1) == NON_LVALUE_EXPR)
4442 {
4443 tree inner = TREE_OPERAND (op1, 0);
4444 if (inner == error_mark_node
4445 || ! INTEGRAL_MODE_P (TYPE_MODE (TREE_TYPE (inner)))
4446 || (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (op1)))
4447 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (inner)))))
4448 break;
4449 op1 = inner;
4450 }
4451
4452 op0 = initializer_constant_valid_p_1 (op0, endtype, cache);
4453 if (!op0)
4454 return NULL_TREE;
4455
4456 op1 = initializer_constant_valid_p_1 (op1, endtype,
4457 cache ? cache + 2 : NULL);
4458 /* Both initializers must be known. */
4459 if (op1)
4460 {
4461 if (op0 == op1
4462 && (op0 == null_pointer_node
4463 || TREE_CODE (value) == MINUS_EXPR))
4464 return null_pointer_node;
4465
4466 /* Support differences between labels. */
4467 if (TREE_CODE (op0) == LABEL_DECL
4468 && TREE_CODE (op1) == LABEL_DECL)
4469 return null_pointer_node;
4470
4471 if (TREE_CODE (op0) == STRING_CST
4472 && TREE_CODE (op1) == STRING_CST
4473 && operand_equal_p (op0, op1, 1))
4474 return null_pointer_node;
4475 }
4476
4477 return NULL_TREE;
4478 }
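
/* Illustrative sketch (not part of varasm.c): the label-difference case
   above is what lets the GNU computed-goto idiom appear in a static
   initializer, since the difference of two label addresses in the same
   function is a link-time constant.  Example source (GNU C extension),
   shown only to illustrate the kind of input accepted:  */

static int
dispatch_example (int i)
{
  /* &&label - &&label differences are accepted as absolute by the
     LABEL_DECL case above.  */
  static const int offsets[] = { &&op_add - &&op_add, &&op_sub - &&op_add };
  goto *(&&op_add + offsets[i]);
 op_add:
  return +1;
 op_sub:
  return -1;
}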
4479
4480 /* Helper function of initializer_constant_valid_p.
4481 Return nonzero if VALUE is a valid constant-valued expression
4482 for use in initializing a static variable; one that can be an
4483 element of a "constant" initializer.
4484
4485 Return null_pointer_node if the value is absolute;
4486 if it is relocatable, return the variable that determines the relocation.
4487 We assume that VALUE has been folded as much as possible;
4488 therefore, we do not need to check for such things as
4489 arithmetic-combinations of integers.
4490
4491 Use CACHE (pointer to 2 tree values) for caching if non-NULL. */
4492
4493 static tree
4494 initializer_constant_valid_p_1 (tree value, tree endtype, tree *cache)
4495 {
4496 tree ret;
4497
4498 switch (TREE_CODE (value))
4499 {
4500 case CONSTRUCTOR:
4501 if (constructor_static_from_elts_p (value))
4502 {
4503 unsigned HOST_WIDE_INT idx;
4504 tree elt;
4505 bool absolute = true;
4506
4507 if (cache && cache[0] == value)
4508 return cache[1];
4509 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (value), idx, elt)
4510 {
4511 tree reloc;
4512 reloc = initializer_constant_valid_p_1 (elt, TREE_TYPE (elt),
4513 NULL);
4514 if (!reloc
4515 /* An absolute value is required with reverse SSO. */
4516 || (reloc != null_pointer_node
4517 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (value))
4518 && !AGGREGATE_TYPE_P (TREE_TYPE (elt))))
4519 {
4520 if (cache)
4521 {
4522 cache[0] = value;
4523 cache[1] = NULL_TREE;
4524 }
4525 return NULL_TREE;
4526 }
4527 if (reloc != null_pointer_node)
4528 absolute = false;
4529 }
4530 /* For a non-absolute relocation, there is no single
4531 variable that can be "the variable that determines the
4532 relocation." */
4533 if (cache)
4534 {
4535 cache[0] = value;
4536 cache[1] = absolute ? null_pointer_node : error_mark_node;
4537 }
4538 return absolute ? null_pointer_node : error_mark_node;
4539 }
4540
4541 return TREE_STATIC (value) ? null_pointer_node : NULL_TREE;
4542
4543 case INTEGER_CST:
4544 case VECTOR_CST:
4545 case REAL_CST:
4546 case FIXED_CST:
4547 case STRING_CST:
4548 case COMPLEX_CST:
4549 return null_pointer_node;
4550
4551 case ADDR_EXPR:
4552 case FDESC_EXPR:
4553 {
4554 tree op0 = staticp (TREE_OPERAND (value, 0));
4555 if (op0)
4556 {
4557 /* "&(*a).f" is like unto pointer arithmetic. If "a" turns out
4558 to be a constant, this is old-skool offsetof-like nonsense. */
4559 if (TREE_CODE (op0) == INDIRECT_REF
4560 && TREE_CONSTANT (TREE_OPERAND (op0, 0)))
4561 return null_pointer_node;
4562 /* Taking the address of a nested function involves a trampoline,
4563 unless we don't need or want one. */
4564 if (TREE_CODE (op0) == FUNCTION_DECL
4565 && DECL_STATIC_CHAIN (op0)
4566 && !TREE_NO_TRAMPOLINE (value))
4567 return NULL_TREE;
4568 /* "&{...}" requires a temporary to hold the constructed
4569 object. */
4570 if (TREE_CODE (op0) == CONSTRUCTOR)
4571 return NULL_TREE;
4572 }
4573 return op0;
4574 }
4575
4576 case NON_LVALUE_EXPR:
4577 return initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
4578 endtype, cache);
4579
4580 case VIEW_CONVERT_EXPR:
4581 {
4582 tree src = TREE_OPERAND (value, 0);
4583 tree src_type = TREE_TYPE (src);
4584 tree dest_type = TREE_TYPE (value);
4585
4586 /* Allow view-conversions from aggregate to non-aggregate type only
4587 if the bit pattern is fully preserved afterwards; otherwise, the
4588 RTL expander won't be able to apply a subsequent transformation
4589 to the underlying constructor. */
4590 if (AGGREGATE_TYPE_P (src_type) && !AGGREGATE_TYPE_P (dest_type))
4591 {
4592 if (TYPE_MODE (endtype) == TYPE_MODE (dest_type))
4593 return initializer_constant_valid_p_1 (src, endtype, cache);
4594 else
4595 return NULL_TREE;
4596 }
4597
4598 /* Allow all other kinds of view-conversion. */
4599 return initializer_constant_valid_p_1 (src, endtype, cache);
4600 }
4601
4602 CASE_CONVERT:
4603 {
4604 tree src = TREE_OPERAND (value, 0);
4605 tree src_type = TREE_TYPE (src);
4606 tree dest_type = TREE_TYPE (value);
4607
4608 /* Allow conversions between pointer types, floating-point
4609 types, and offset types. */
4610 if ((POINTER_TYPE_P (dest_type) && POINTER_TYPE_P (src_type))
4611 || (FLOAT_TYPE_P (dest_type) && FLOAT_TYPE_P (src_type))
4612 || (TREE_CODE (dest_type) == OFFSET_TYPE
4613 && TREE_CODE (src_type) == OFFSET_TYPE))
4614 return initializer_constant_valid_p_1 (src, endtype, cache);
4615
4616 /* Allow length-preserving conversions between integer types. */
4617 if (INTEGRAL_TYPE_P (dest_type) && INTEGRAL_TYPE_P (src_type)
4618 && (TYPE_PRECISION (dest_type) == TYPE_PRECISION (src_type)))
4619 return initializer_constant_valid_p_1 (src, endtype, cache);
4620
4624 /* Allow conversions between other integer types only for
4625 explicit values. Don't allow sign-extension to a type larger
4626 than word and pointer; there are no relocations that would
4627 sign-extend the value to a wider type. */
4625 if (INTEGRAL_TYPE_P (dest_type)
4626 && INTEGRAL_TYPE_P (src_type)
4627 && (TYPE_UNSIGNED (src_type)
4628 || TYPE_PRECISION (dest_type) <= TYPE_PRECISION (src_type)
4629 || TYPE_PRECISION (dest_type) <= BITS_PER_WORD
4630 || TYPE_PRECISION (dest_type) <= POINTER_SIZE))
4631 {
4632 tree inner = initializer_constant_valid_p_1 (src, endtype, cache);
4633 if (inner == null_pointer_node)
4634 return null_pointer_node;
4635 break;
4636 }
4637
4638 /* Allow (int) &foo provided int is as wide as a pointer. */
4639 if (INTEGRAL_TYPE_P (dest_type) && POINTER_TYPE_P (src_type)
4640 && (TYPE_PRECISION (dest_type) >= TYPE_PRECISION (src_type)))
4641 return initializer_constant_valid_p_1 (src, endtype, cache);
4642
4643 /* Likewise conversions from int to pointers, but also allow
4644 conversions from 0. */
4645 if ((POINTER_TYPE_P (dest_type)
4646 || TREE_CODE (dest_type) == OFFSET_TYPE)
4647 && INTEGRAL_TYPE_P (src_type))
4648 {
4649 if (TREE_CODE (src) == INTEGER_CST
4650 && TYPE_PRECISION (dest_type) >= TYPE_PRECISION (src_type))
4651 return null_pointer_node;
4652 if (integer_zerop (src))
4653 return null_pointer_node;
4654 else if (TYPE_PRECISION (dest_type) <= TYPE_PRECISION (src_type))
4655 return initializer_constant_valid_p_1 (src, endtype, cache);
4656 }
4657
4658 /* Allow conversions to struct or union types if the value
4659 inside is okay. */
4660 if (TREE_CODE (dest_type) == RECORD_TYPE
4661 || TREE_CODE (dest_type) == UNION_TYPE)
4662 return initializer_constant_valid_p_1 (src, endtype, cache);
4663 }
4664 break;
4665
4666 case POINTER_PLUS_EXPR:
4667 case PLUS_EXPR:
4668 /* Any valid floating-point constants will have been folded by now;
4669 with -frounding-math we hit this with addition of two constants. */
4670 if (TREE_CODE (endtype) == REAL_TYPE)
4671 return NULL_TREE;
4672 if (cache && cache[0] == value)
4673 return cache[1];
4674 if (! INTEGRAL_TYPE_P (endtype)
4675 || TYPE_PRECISION (endtype) >= TYPE_PRECISION (TREE_TYPE (value)))
4676 {
4677 tree ncache[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
4678 tree valid0
4679 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
4680 endtype, ncache);
4681 tree valid1
4682 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 1),
4683 endtype, ncache + 2);
4684 /* If either term is absolute, use the other term's relocation. */
4685 if (valid0 == null_pointer_node)
4686 ret = valid1;
4687 else if (valid1 == null_pointer_node)
4688 ret = valid0;
4689 /* Support narrowing pointer differences. */
4690 else
4691 ret = narrowing_initializer_constant_valid_p (value, endtype,
4692 ncache);
4693 }
4694 else
4695 /* Support narrowing pointer differences. */
4696 ret = narrowing_initializer_constant_valid_p (value, endtype, NULL);
4697 if (cache)
4698 {
4699 cache[0] = value;
4700 cache[1] = ret;
4701 }
4702 return ret;
4703
4704 case POINTER_DIFF_EXPR:
4705 case MINUS_EXPR:
4706 if (TREE_CODE (endtype) == REAL_TYPE)
4707 return NULL_TREE;
4708 if (cache && cache[0] == value)
4709 return cache[1];
4710 if (! INTEGRAL_TYPE_P (endtype)
4711 || TYPE_PRECISION (endtype) >= TYPE_PRECISION (TREE_TYPE (value)))
4712 {
4713 tree ncache[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
4714 tree valid0
4715 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
4716 endtype, ncache);
4717 tree valid1
4718 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 1),
4719 endtype, ncache + 2);
4720 /* Win if second argument is absolute. */
4721 if (valid1 == null_pointer_node)
4722 ret = valid0;
4723 /* Win if both arguments have the same relocation.
4724 Then the value is absolute. */
4725 else if (valid0 == valid1 && valid0 != 0)
4726 ret = null_pointer_node;
4727 /* Since GCC guarantees that string constants are unique in the
4728 generated code, a subtraction between two copies of the same
4729 constant string is absolute. */
4730 else if (valid0 && TREE_CODE (valid0) == STRING_CST
4731 && valid1 && TREE_CODE (valid1) == STRING_CST
4732 && operand_equal_p (valid0, valid1, 1))
4733 ret = null_pointer_node;
4734 /* Support narrowing differences. */
4735 else
4736 ret = narrowing_initializer_constant_valid_p (value, endtype,
4737 ncache);
4738 }
4739 else
4740 /* Support narrowing differences. */
4741 ret = narrowing_initializer_constant_valid_p (value, endtype, NULL);
4742 if (cache)
4743 {
4744 cache[0] = value;
4745 cache[1] = ret;
4746 }
4747 return ret;
4748
4749 default:
4750 break;
4751 }
4752
4753 return NULL_TREE;
4754 }
4755
4756 /* Return nonzero if VALUE is a valid constant-valued expression
4757 for use in initializing a static variable; one that can be an
4758 element of a "constant" initializer.
4759
4760 Return null_pointer_node if the value is absolute;
4761 if it is relocatable, return the variable that determines the relocation.
4762 We assume that VALUE has been folded as much as possible;
4763 therefore, we do not need to check for such things as
4764 arithmetic-combinations of integers. */
4765 tree
4766 initializer_constant_valid_p (tree value, tree endtype, bool reverse)
4767 {
4768 tree reloc = initializer_constant_valid_p_1 (value, endtype, NULL);
4769
4770 /* An absolute value is required with reverse storage order. */
4771 if (reloc
4772 && reloc != null_pointer_node
4773 && reverse
4774 && !AGGREGATE_TYPE_P (endtype)
4775 && !VECTOR_TYPE_P (endtype))
4776 reloc = NULL_TREE;
4777
4778 return reloc;
4779 }
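
/* Illustrative sketch (not part of varasm.c): callers typically key off the
   three possible outcomes of initializer_constant_valid_p -- NULL_TREE (not
   a valid static initializer), null_pointer_node (absolute, no runtime
   relocation), or some decl/label (relocatable against that object).  The
   enum and function below are hypothetical names used only for this
   example.  */

enum example_init_kind { INIT_INVALID, INIT_ABSOLUTE, INIT_RELOCATABLE };

static enum example_init_kind
classify_static_initializer (tree value, tree type)
{
  tree reloc = initializer_constant_valid_p (value, type, false);
  if (reloc == NULL_TREE)
    return INIT_INVALID;
  if (reloc == null_pointer_node)
    return INIT_ABSOLUTE;
  return INIT_RELOCATABLE;
}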
4780 \f
4781 /* Return true if VALUE is a valid constant-valued expression
4782 for use in initializing a static bit-field; one that can be
4783 an element of a "constant" initializer. */
4784
4785 bool
4786 initializer_constant_valid_for_bitfield_p (tree value)
4787 {
4788 /* For bitfields we support integer constants or possibly nested aggregates
4789 of such. */
4790 switch (TREE_CODE (value))
4791 {
4792 case CONSTRUCTOR:
4793 {
4794 unsigned HOST_WIDE_INT idx;
4795 tree elt;
4796
4797 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (value), idx, elt)
4798 if (!initializer_constant_valid_for_bitfield_p (elt))
4799 return false;
4800 return true;
4801 }
4802
4803 case INTEGER_CST:
4804 case REAL_CST:
4805 return true;
4806
4807 case VIEW_CONVERT_EXPR:
4808 case NON_LVALUE_EXPR:
4809 return
4810 initializer_constant_valid_for_bitfield_p (TREE_OPERAND (value, 0));
4811
4812 default:
4813 break;
4814 }
4815
4816 return false;
4817 }
4818
4819 /* Check if a STRING_CST fits into the field.
4820 Tolerate only the case when the NUL termination
4821 does not fit into the field. */
4822
4823 static bool
4824 check_string_literal (tree string, unsigned HOST_WIDE_INT size)
4825 {
4826 tree type = TREE_TYPE (string);
4827 tree eltype = TREE_TYPE (type);
4828 unsigned HOST_WIDE_INT elts = tree_to_uhwi (TYPE_SIZE_UNIT (eltype));
4829 unsigned HOST_WIDE_INT mem_size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
4830 int len = TREE_STRING_LENGTH (string);
4831
4832 if (elts != 1 && elts != 2 && elts != 4)
4833 return false;
4834 if (len < 0 || len % elts != 0)
4835 return false;
4836 if (size < (unsigned)len)
4837 return false;
4838 if (mem_size != size)
4839 return false;
4840 return true;
4841 }
4842
4843 /* output_constructor outer state of relevance in recursive calls, typically
4844 for nested aggregate bitfields. */
4845
4846 struct oc_outer_state {
4847 unsigned int bit_offset; /* current position in ... */
4848 int byte; /* ... the outer byte buffer. */
4849 };
4850
4851 static unsigned HOST_WIDE_INT
4852 output_constructor (tree, unsigned HOST_WIDE_INT, unsigned int, bool,
4853 oc_outer_state *);
4854
4855 /* Output assembler code for constant EXP, with no label.
4856 This includes the pseudo-op such as ".int" or ".byte", and a newline.
4857 Assumes output_addressed_constants has been done on EXP already.
4858
4859 Generate at least SIZE bytes of assembler data, padding at the end
4860 with zeros if necessary. SIZE must always be specified. The returned
4861 value is the actual number of bytes of assembler data generated, which
4862 may be bigger than SIZE if the object contains a variable length field.
4863
4864 SIZE is important for structure constructors,
4865 since trailing members may have been omitted from the constructor.
4866 It is also important for initialization of arrays from string constants
4867 since the full length of the string constant might not be wanted.
4868 It is also needed for initialization of unions, where the initializer's
4869 type is just one member, and that may not be as long as the union.
4870
4871 There is a case in which we would fail to output exactly SIZE bytes:
4872 for a structure constructor that wants to produce more than SIZE bytes.
4873 But such constructors will never be generated for any possible input.
4874
4875 ALIGN is the alignment of the data in bits.
4876
4877 If REVERSE is true, EXP is output in reverse storage order. */
4878
4879 static unsigned HOST_WIDE_INT
4880 output_constant (tree exp, unsigned HOST_WIDE_INT size, unsigned int align,
4881 bool reverse, bool merge_strings)
4882 {
4883 enum tree_code code;
4884 unsigned HOST_WIDE_INT thissize;
4885 rtx cst;
4886
4887 if (size == 0 || flag_syntax_only)
4888 return size;
4889
4890 /* See if we're trying to initialize a pointer in a non-default mode
4891 to the address of some declaration somewhere. If the target says
4892 the mode is valid for pointers, assume the target has a way of
4893 resolving it. */
4894 if (TREE_CODE (exp) == NOP_EXPR
4895 && POINTER_TYPE_P (TREE_TYPE (exp))
4896 && targetm.addr_space.valid_pointer_mode
4897 (SCALAR_INT_TYPE_MODE (TREE_TYPE (exp)),
4898 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)))))
4899 {
4900 tree saved_type = TREE_TYPE (exp);
4901
4902 /* Peel off any intermediate conversions-to-pointer for valid
4903 pointer modes. */
4904 while (TREE_CODE (exp) == NOP_EXPR
4905 && POINTER_TYPE_P (TREE_TYPE (exp))
4906 && targetm.addr_space.valid_pointer_mode
4907 (SCALAR_INT_TYPE_MODE (TREE_TYPE (exp)),
4908 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)))))
4909 exp = TREE_OPERAND (exp, 0);
4910
4911 /* If what we're left with is the address of something, we can
4912 convert the address to the final type and output it that
4913 way. */
4914 if (TREE_CODE (exp) == ADDR_EXPR)
4915 exp = build1 (ADDR_EXPR, saved_type, TREE_OPERAND (exp, 0));
4916 /* Likewise for constant ints. */
4917 else if (TREE_CODE (exp) == INTEGER_CST)
4918 exp = fold_convert (saved_type, exp);
4919
4920 }
4921
4922 /* Eliminate any conversions since we'll be outputting the underlying
4923 constant. */
4924 while (CONVERT_EXPR_P (exp)
4925 || TREE_CODE (exp) == NON_LVALUE_EXPR
4926 || TREE_CODE (exp) == VIEW_CONVERT_EXPR)
4927 {
4928 HOST_WIDE_INT type_size = int_size_in_bytes (TREE_TYPE (exp));
4929 HOST_WIDE_INT op_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0)));
4930
4931 /* Make sure eliminating the conversion is really a no-op, except with
4932 VIEW_CONVERT_EXPRs to allow for wild Ada unchecked conversions and
4933 union types to allow for Ada unchecked unions. */
4934 if (type_size > op_size
4935 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
4936 && TREE_CODE (TREE_TYPE (exp)) != UNION_TYPE)
4937 /* Keep the conversion. */
4938 break;
4939 else
4940 exp = TREE_OPERAND (exp, 0);
4941 }
4942
4943 code = TREE_CODE (TREE_TYPE (exp));
4944 thissize = int_size_in_bytes (TREE_TYPE (exp));
4945
4946 /* Allow a constructor with no elements for any data type.
4947 This means to fill the space with zeros. */
4948 if (TREE_CODE (exp) == CONSTRUCTOR
4949 && vec_safe_is_empty (CONSTRUCTOR_ELTS (exp)))
4950 {
4951 assemble_zeros (size);
4952 return size;
4953 }
4954
4955 if (TREE_CODE (exp) == FDESC_EXPR)
4956 {
4957 #ifdef ASM_OUTPUT_FDESC
4958 HOST_WIDE_INT part = tree_to_shwi (TREE_OPERAND (exp, 1));
4959 tree decl = TREE_OPERAND (exp, 0);
4960 ASM_OUTPUT_FDESC (asm_out_file, decl, part);
4961 #else
4962 gcc_unreachable ();
4963 #endif
4964 return size;
4965 }
4966
4967 /* Now output the underlying data. If we've handled the padding, return.
4968 Otherwise, break and ensure SIZE is the size written. */
4969 switch (code)
4970 {
4971 case BOOLEAN_TYPE:
4972 case INTEGER_TYPE:
4973 case ENUMERAL_TYPE:
4974 case POINTER_TYPE:
4975 case REFERENCE_TYPE:
4976 case OFFSET_TYPE:
4977 case FIXED_POINT_TYPE:
4978 case NULLPTR_TYPE:
4979 cst = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
4980 if (reverse)
4981 cst = flip_storage_order (TYPE_MODE (TREE_TYPE (exp)), cst);
4982 if (!assemble_integer (cst, MIN (size, thissize), align, 0))
4983 error ("initializer for integer/fixed-point value is too complicated");
4984 break;
4985
4986 case REAL_TYPE:
4987 if (TREE_CODE (exp) != REAL_CST)
4988 error ("initializer for floating value is not a floating constant");
4989 else
4990 assemble_real (TREE_REAL_CST (exp),
4991 SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (exp)),
4992 align, reverse);
4993 break;
4994
4995 case COMPLEX_TYPE:
4996 output_constant (TREE_REALPART (exp), thissize / 2, align,
4997 reverse, false);
4998 output_constant (TREE_IMAGPART (exp), thissize / 2,
4999 min_align (align, BITS_PER_UNIT * (thissize / 2)),
5000 reverse, false);
5001 break;
5002
5003 case ARRAY_TYPE:
5004 case VECTOR_TYPE:
5005 switch (TREE_CODE (exp))
5006 {
5007 case CONSTRUCTOR:
5008 return output_constructor (exp, size, align, reverse, NULL);
5009 case STRING_CST:
5010 thissize = (unsigned HOST_WIDE_INT)TREE_STRING_LENGTH (exp);
5011 if (merge_strings
5012 && (thissize == 0
5013 || TREE_STRING_POINTER (exp) [thissize - 1] != '\0'))
5014 thissize++;
5015 gcc_checking_assert (check_string_literal (exp, size));
5016 assemble_string (TREE_STRING_POINTER (exp), thissize);
5017 break;
5018 case VECTOR_CST:
5019 {
5020 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5021 unsigned int nalign = MIN (align, GET_MODE_ALIGNMENT (inner));
5022 int elt_size = GET_MODE_SIZE (inner);
5023 output_constant (VECTOR_CST_ELT (exp, 0), elt_size, align,
5024 reverse, false);
5025 thissize = elt_size;
5026 /* Static constants must have a fixed size. */
5027 unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
5028 for (unsigned int i = 1; i < nunits; i++)
5029 {
5030 output_constant (VECTOR_CST_ELT (exp, i), elt_size, nalign,
5031 reverse, false);
5032 thissize += elt_size;
5033 }
5034 break;
5035 }
5036 default:
5037 gcc_unreachable ();
5038 }
5039 break;
5040
5041 case RECORD_TYPE:
5042 case UNION_TYPE:
5043 gcc_assert (TREE_CODE (exp) == CONSTRUCTOR);
5044 return output_constructor (exp, size, align, reverse, NULL);
5045
5046 case ERROR_MARK:
5047 return 0;
5048
5049 default:
5050 gcc_unreachable ();
5051 }
5052
5053 if (size > thissize)
5054 assemble_zeros (size - thissize);
5055
5056 return size;
5057 }
5058 \f
5059 /* Subroutine of output_constructor, used for computing the size of
5060 arrays of unspecified length. VAL must be a CONSTRUCTOR of an array
5061 type with an unspecified upper bound. */
5062
5063 static unsigned HOST_WIDE_INT
5064 array_size_for_constructor (tree val)
5065 {
5066 tree max_index;
5067 unsigned HOST_WIDE_INT cnt;
5068 tree index, value, tmp;
5069 offset_int i;
5070
5071 /* This code used to attempt to handle string constants that are not
5072 arrays of single-bytes, but nothing else does, so there's no point in
5073 doing it here. */
5074 if (TREE_CODE (val) == STRING_CST)
5075 return TREE_STRING_LENGTH (val);
5076
5077 max_index = NULL_TREE;
5078 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (val), cnt, index, value)
5079 {
5080 if (TREE_CODE (index) == RANGE_EXPR)
5081 index = TREE_OPERAND (index, 1);
5082 if (max_index == NULL_TREE || tree_int_cst_lt (max_index, index))
5083 max_index = index;
5084 }
5085
5086 if (max_index == NULL_TREE)
5087 return 0;
5088
5089 /* Compute the total number of array elements. */
5090 tmp = TYPE_MIN_VALUE (TYPE_DOMAIN (TREE_TYPE (val)));
5091 i = wi::to_offset (max_index) - wi::to_offset (tmp) + 1;
5092
5093 /* Multiply by the array element unit size to find number of bytes. */
5094 i *= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (val))));
5095
5096 gcc_assert (wi::fits_uhwi_p (i));
5097 return i.to_uhwi ();
5098 }
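
/* Illustrative sketch (not part of varasm.c): the size computed above is
   simply (max_index - min_index + 1) * element_size_in_bytes; e.g. a
   largest index of 9 in a domain starting at 0, with 4-byte elements,
   gives 40 bytes.  A standalone version of the arithmetic:  */

static unsigned long long
array_bytes_from_bounds (long long min_index, long long max_index,
			 unsigned long long elt_size)
{
  /* Number of elements times element size, as in
     array_size_for_constructor above.  */
  unsigned long long nelts
    = (unsigned long long) (max_index - min_index) + 1;
  return nelts * elt_size;
}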
5099
5100 /* Other data structures and helpers for output_constructor. */
5101
5102 /* output_constructor local state to support interaction with helpers. */
5103
5104 struct oc_local_state {
5105
5106 /* Received arguments. */
5107 tree exp; /* Constructor expression. */
5108 tree type; /* Type of constructor expression. */
5109 unsigned HOST_WIDE_INT size; /* # bytes to output - pad if necessary. */
5110 unsigned int align; /* Known initial alignment. */
5111 tree min_index; /* Lower bound if specified for an array. */
5112
5113 /* Output processing state. */
5114 HOST_WIDE_INT total_bytes; /* # bytes output so far / current position. */
5115 int byte; /* Part of a bitfield byte yet to be output. */
5116 int last_relative_index; /* Implicit or explicit index of the last
5117 array element output within a bitfield. */
5118 bool byte_buffer_in_use; /* Whether BYTE is in use. */
5119 bool reverse; /* Whether reverse storage order is in use. */
5120
5121 /* Current element. */
5122 tree field; /* Current field decl in a record. */
5123 tree val; /* Current element value. */
5124 tree index; /* Current element index. */
5125
5126 };
5127
5128 /* Helper for output_constructor. From the current LOCAL state, output a
5129 RANGE_EXPR element. */
5130
5131 static void
5132 output_constructor_array_range (oc_local_state *local)
5133 {
5134 unsigned HOST_WIDE_INT fieldsize
5135 = int_size_in_bytes (TREE_TYPE (local->type));
5136
5137 HOST_WIDE_INT lo_index
5138 = tree_to_shwi (TREE_OPERAND (local->index, 0));
5139 HOST_WIDE_INT hi_index
5140 = tree_to_shwi (TREE_OPERAND (local->index, 1));
5141 HOST_WIDE_INT index;
5142
5143 unsigned int align2
5144 = min_align (local->align, fieldsize * BITS_PER_UNIT);
5145
5146 for (index = lo_index; index <= hi_index; index++)
5147 {
5148 /* Output the element's initial value. */
5149 if (local->val == NULL_TREE)
5150 assemble_zeros (fieldsize);
5151 else
5152 fieldsize = output_constant (local->val, fieldsize, align2,
5153 local->reverse, false);
5154
5155 /* Count its size. */
5156 local->total_bytes += fieldsize;
5157 }
5158 }
5159
5160 /* Helper for output_constructor. From the current LOCAL state, output a
5161 field element that is neither a true bitfield nor part of an outer one. */
5162
5163 static void
5164 output_constructor_regular_field (oc_local_state *local)
5165 {
5166 /* Field size and position. Since this structure is static, we know the
5167 positions are constant. */
5168 unsigned HOST_WIDE_INT fieldsize;
5169 HOST_WIDE_INT fieldpos;
5170
5171 unsigned int align2;
5172
5173 /* Output any buffered-up bit-fields preceding this element. */
5174 if (local->byte_buffer_in_use)
5175 {
5176 assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
5177 local->total_bytes++;
5178 local->byte_buffer_in_use = false;
5179 }
5180
5181 if (local->index != NULL_TREE)
5182 {
5183 /* Perform the index calculation in modulo arithmetic but
5184 sign-extend the result because Ada has negative DECL_FIELD_OFFSETs
5185 but we are using an unsigned sizetype. */
5186 unsigned prec = TYPE_PRECISION (sizetype);
5187 offset_int idx = wi::sext (wi::to_offset (local->index)
5188 - wi::to_offset (local->min_index), prec);
5189 fieldpos = (idx * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (local->val))))
5190 .to_short_addr ();
5191 }
5192 else if (local->field != NULL_TREE)
5193 fieldpos = int_byte_position (local->field);
5194 else
5195 fieldpos = 0;
5196
5197 /* Advance to offset of this element.
5198 Note no alignment needed in an array, since that is guaranteed
5199 if each element has the proper size. */
5200 if (local->field != NULL_TREE || local->index != NULL_TREE)
5201 {
5202 if (fieldpos > local->total_bytes)
5203 {
5204 assemble_zeros (fieldpos - local->total_bytes);
5205 local->total_bytes = fieldpos;
5206 }
5207 else
5208 /* Must not go backwards. */
5209 gcc_assert (fieldpos == local->total_bytes);
5210 }
5211
5212 /* Find the alignment of this element. */
5213 align2 = min_align (local->align, BITS_PER_UNIT * fieldpos);
5214
5215 /* Determine size this element should occupy. */
5216 if (local->field)
5217 {
5218 fieldsize = 0;
5219
5220 /* If this is an array with an unspecified upper bound,
5221 the initializer determines the size. */
5222 /* ??? This ought to be checked only if DECL_SIZE_UNIT is NULL,
5223 but we cannot do this until the deprecated support for
5224 initializing zero-length array members is removed. */
5225 if (TREE_CODE (TREE_TYPE (local->field)) == ARRAY_TYPE
5226 && (!TYPE_DOMAIN (TREE_TYPE (local->field))
5227 || !TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (local->field)))))
5228 {
5229 fieldsize = array_size_for_constructor (local->val);
5230 /* Given a non-empty initialization, this field had better
5231 be last. Given a flexible array member, the next field
5232 on the chain is a TYPE_DECL of the enclosing struct. */
5233 const_tree next = DECL_CHAIN (local->field);
5234 gcc_assert (!fieldsize || !next || TREE_CODE (next) != FIELD_DECL);
5235 tree size = TYPE_SIZE_UNIT (TREE_TYPE (local->val));
5236 gcc_checking_assert (compare_tree_int (size, fieldsize) == 0);
5237 }
5238 else
5239 fieldsize = tree_to_uhwi (DECL_SIZE_UNIT (local->field));
5240 }
5241 else
5242 fieldsize = int_size_in_bytes (TREE_TYPE (local->type));
5243
5244 /* Output the element's initial value. */
5245 if (local->val == NULL_TREE)
5246 assemble_zeros (fieldsize);
5247 else
5248 fieldsize = output_constant (local->val, fieldsize, align2,
5249 local->reverse, false);
5250
5251 /* Count its size. */
5252 local->total_bytes += fieldsize;
5253 }
5254
5255 /* Helper for output_constructor. From the LOCAL state, output an element
5256 that is a true bitfield or part of an outer one. BIT_OFFSET is the offset
5257 from the start of a possibly ongoing outer byte buffer. */
5258
5259 static void
5260 output_constructor_bitfield (oc_local_state *local, unsigned int bit_offset)
5261 {
5262 /* Bit size of this element. */
5263 HOST_WIDE_INT ebitsize
5264 = (local->field
5265 ? tree_to_uhwi (DECL_SIZE (local->field))
5266 : tree_to_uhwi (TYPE_SIZE (TREE_TYPE (local->type))));
5267
5268 /* Relative index of this element if this is an array component. */
5269 HOST_WIDE_INT relative_index
5270 = (!local->field
5271 ? (local->index
5272 ? (tree_to_shwi (local->index)
5273 - tree_to_shwi (local->min_index))
5274 : local->last_relative_index + 1)
5275 : 0);
5276
5277 /* Bit position of this element from the start of the containing
5278 constructor. */
5279 HOST_WIDE_INT constructor_relative_ebitpos
5280 = (local->field
5281 ? int_bit_position (local->field)
5282 : ebitsize * relative_index);
5283
5284 /* Bit position of this element from the start of a possibly ongoing
5285 outer byte buffer. */
5286 HOST_WIDE_INT byte_relative_ebitpos
5287 = bit_offset + constructor_relative_ebitpos;
5288
5289 /* From the start of a possibly ongoing outer byte buffer, offsets to
5290 the first bit of this element and to the first bit past the end of
5291 this element. */
5292 HOST_WIDE_INT next_offset = byte_relative_ebitpos;
5293 HOST_WIDE_INT end_offset = byte_relative_ebitpos + ebitsize;
5294
5295 local->last_relative_index = relative_index;
5296
5297 if (local->val == NULL_TREE)
5298 local->val = integer_zero_node;
5299
5300 while (TREE_CODE (local->val) == VIEW_CONVERT_EXPR
5301 || TREE_CODE (local->val) == NON_LVALUE_EXPR)
5302 local->val = TREE_OPERAND (local->val, 0);
5303
5304 if (TREE_CODE (local->val) != INTEGER_CST
5305 && TREE_CODE (local->val) != CONSTRUCTOR)
5306 {
5307 error ("invalid initial value for member %qE", DECL_NAME (local->field));
5308 return;
5309 }
5310
5311 /* If this field does not start in this (or next) byte, skip some bytes. */
5312 if (next_offset / BITS_PER_UNIT != local->total_bytes)
5313 {
5314 /* Output remnant of any bit field in previous bytes. */
5315 if (local->byte_buffer_in_use)
5316 {
5317 assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
5318 local->total_bytes++;
5319 local->byte_buffer_in_use = false;
5320 }
5321
5322 /* If still not at proper byte, advance to there. */
5323 if (next_offset / BITS_PER_UNIT != local->total_bytes)
5324 {
5325 gcc_assert (next_offset / BITS_PER_UNIT >= local->total_bytes);
5326 assemble_zeros (next_offset / BITS_PER_UNIT - local->total_bytes);
5327 local->total_bytes = next_offset / BITS_PER_UNIT;
5328 }
5329 }
5330
5331 /* Set up the buffer if necessary. */
5332 if (!local->byte_buffer_in_use)
5333 {
5334 local->byte = 0;
5335 if (ebitsize > 0)
5336 local->byte_buffer_in_use = true;
5337 }
5338
5339 /* If this is a nested constructor, recurse passing the bit offset and the
5340 pending data, then retrieve the new pending data afterwards. */
5341 if (TREE_CODE (local->val) == CONSTRUCTOR)
5342 {
5343 oc_outer_state temp_state;
5344 temp_state.bit_offset = next_offset % BITS_PER_UNIT;
5345 temp_state.byte = local->byte;
5346 local->total_bytes
5347 += output_constructor (local->val, 0, 0, local->reverse, &temp_state);
5348 local->byte = temp_state.byte;
5349 return;
5350 }
5351
5352 /* Otherwise, we must split the element into pieces that fall within
5353 separate bytes, and combine each byte with previous or following
5354 bit-fields. */
5355 while (next_offset < end_offset)
5356 {
5357 int this_time;
5358 int shift;
5359 unsigned HOST_WIDE_INT value;
5360 HOST_WIDE_INT next_byte = next_offset / BITS_PER_UNIT;
5361 HOST_WIDE_INT next_bit = next_offset % BITS_PER_UNIT;
5362
5363 /* Advance from byte to byte within this element when necessary. */
5364 while (next_byte != local->total_bytes)
5365 {
5366 assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
5367 local->total_bytes++;
5368 local->byte = 0;
5369 }
5370
5371 /* Number of bits we can process at once (all part of the same byte). */
5372 this_time = MIN (end_offset - next_offset, BITS_PER_UNIT - next_bit);
5373 if (local->reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5374 {
5375 /* For big-endian data, take the most significant bits (of the
5376 bits that are significant) first and put them into bytes from
5377 the most significant end. */
5378 shift = end_offset - next_offset - this_time;
5379
5380 /* Don't try to take a bunch of bits that cross
5381 the word boundary in the INTEGER_CST. We can
5382 only select bits from one element. */
5383 if ((shift / HOST_BITS_PER_WIDE_INT)
5384 != ((shift + this_time - 1) / HOST_BITS_PER_WIDE_INT))
5385 {
5386 const int end = shift + this_time - 1;
5387 shift = end & -HOST_BITS_PER_WIDE_INT;
5388 this_time = end - shift + 1;
5389 }
5390
5391 /* Now get the bits we want to insert. */
5392 value = wi::extract_uhwi (wi::to_widest (local->val),
5393 shift, this_time);
5394
5395 /* Get the result. This works only when:
5396 1 <= this_time <= HOST_BITS_PER_WIDE_INT. */
5397 local->byte |= value << (BITS_PER_UNIT - this_time - next_bit);
5398 }
5399 else
5400 {
5401 /* On little-endian machines, take the least significant bits of
5402 the value first and pack them starting at the least significant
5403 bits of the bytes. */
5404 shift = next_offset - byte_relative_ebitpos;
5405
5406 /* Don't try to take a bunch of bits that cross
5407 the word boundary in the INTEGER_CST. We can
5408 only select bits from one element. */
5409 if ((shift / HOST_BITS_PER_WIDE_INT)
5410 != ((shift + this_time - 1) / HOST_BITS_PER_WIDE_INT))
5411 this_time
5412 = HOST_BITS_PER_WIDE_INT - (shift & (HOST_BITS_PER_WIDE_INT - 1));
5413
5414 /* Now get the bits we want to insert. */
5415 value = wi::extract_uhwi (wi::to_widest (local->val),
5416 shift, this_time);
5417
5418 /* Get the result. This works only when:
5419 1 <= this_time <= HOST_BITS_PER_WIDE_INT. */
5420 local->byte |= value << next_bit;
5421 }
5422
5423 next_offset += this_time;
5424 local->byte_buffer_in_use = true;
5425 }
5426 }
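
/* As a worked example of the packing loop above, consider a
   little-endian target with BITS_PER_UNIT == 8 and an initializer

       struct { unsigned a : 3; unsigned b : 7; } x = { 5, 67 };

   laid out in the usual way.  Field A fills bits 0..2 of byte 0 with
   the value 5; field B starts at bit 3, so its low 5 bits go into the
   same byte shifted left by NEXT_BIT == 3 ((67 & 0x1f) << 3 == 0x18,
   making byte 0 == 0x1d) and its remaining 2 bits land in bits 0..1
   of byte 1 (67 >> 5 == 2).  The exact layout is of course determined
   by the target ABI; this only illustrates how NEXT_OFFSET, NEXT_BIT
   and THIS_TIME advance.  */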
5427
5428 /* Subroutine of output_constant, used for CONSTRUCTORs (aggregate constants).
5429 Generate at least SIZE bytes, padding if necessary. OUTER designates the
5430 caller output state of relevance in recursive invocations. */
5431
5432 static unsigned HOST_WIDE_INT
5433 output_constructor (tree exp, unsigned HOST_WIDE_INT size, unsigned int align,
5434 bool reverse, oc_outer_state *outer)
5435 {
5436 unsigned HOST_WIDE_INT cnt;
5437 constructor_elt *ce;
5438 oc_local_state local;
5439
5440 /* Setup our local state to communicate with helpers. */
5441 local.exp = exp;
5442 local.type = TREE_TYPE (exp);
5443 local.size = size;
5444 local.align = align;
5445 if (TREE_CODE (local.type) == ARRAY_TYPE && TYPE_DOMAIN (local.type))
5446 local.min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (local.type));
5447 else
5448 local.min_index = integer_zero_node;
5449
5450 local.total_bytes = 0;
5451 local.byte_buffer_in_use = outer != NULL;
5452 local.byte = outer ? outer->byte : 0;
5453 local.last_relative_index = -1;
5454 /* The storage order is specified for every aggregate type. */
5455 if (AGGREGATE_TYPE_P (local.type))
5456 local.reverse = TYPE_REVERSE_STORAGE_ORDER (local.type);
5457 else
5458 local.reverse = reverse;
5459
5460 gcc_assert (HOST_BITS_PER_WIDE_INT >= BITS_PER_UNIT);
5461
5462 /* As CE goes through the elements of the constant, FIELD goes through the
5463 structure fields if the constant is a structure. If the constant is a
5464 union, we override this by getting the field from the TREE_LIST element.
5465 But the constant could also be an array. Then FIELD is zero.
5466
5467 There is always a maximum of one element in the chain LINK for unions
5468 (even if the initializer in a source program incorrectly contains
5469 more than one). */
5470
5471 if (TREE_CODE (local.type) == RECORD_TYPE)
5472 local.field = TYPE_FIELDS (local.type);
5473 else
5474 local.field = NULL_TREE;
5475
5476 for (cnt = 0;
5477 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), cnt, &ce);
5478 cnt++, local.field = local.field ? DECL_CHAIN (local.field) : 0)
5479 {
5480 local.val = ce->value;
5481 local.index = NULL_TREE;
5482
5483 /* The element in a union constructor specifies the proper field
5484 or index. */
5485 if (RECORD_OR_UNION_TYPE_P (local.type) && ce->index != NULL_TREE)
5486 local.field = ce->index;
5487
5488 else if (TREE_CODE (local.type) == ARRAY_TYPE)
5489 local.index = ce->index;
5490
5491 if (local.field && flag_verbose_asm)
5492 fprintf (asm_out_file, "%s %s:\n",
5493 ASM_COMMENT_START,
5494 DECL_NAME (local.field)
5495 ? IDENTIFIER_POINTER (DECL_NAME (local.field))
5496 : "<anonymous>");
5497
5498 /* Eliminate the marker that makes a cast not be an lvalue. */
5499 if (local.val != NULL_TREE)
5500 STRIP_NOPS (local.val);
5501
5502 /* Output the current element, using the appropriate helper ... */
5503
5504 /* For an array slice not part of an outer bitfield. */
5505 if (!outer
5506 && local.index != NULL_TREE
5507 && TREE_CODE (local.index) == RANGE_EXPR)
5508 output_constructor_array_range (&local);
5509
5510 /* For a field that is neither a true bitfield nor part of an outer one,
5511 known to be at least byte aligned and multiple-of-bytes long. */
5512 else if (!outer
5513 && (local.field == NULL_TREE
5514 || !CONSTRUCTOR_BITFIELD_P (local.field)))
5515 output_constructor_regular_field (&local);
5516
5517 /* For a true bitfield or part of an outer one. Only INTEGER_CSTs are
5518 supported for scalar fields, so we may need to convert first. */
5519 else
5520 {
5521 if (TREE_CODE (local.val) == REAL_CST)
5522 local.val
5523 = fold_unary (VIEW_CONVERT_EXPR,
5524 build_nonstandard_integer_type
5525 (TYPE_PRECISION (TREE_TYPE (local.val)), 0),
5526 local.val);
5527 output_constructor_bitfield (&local, outer ? outer->bit_offset : 0);
5528 }
5529 }
5530
5531 /* If we are not at toplevel, save the pending data for our caller.
5532 Otherwise output the pending data and padding zeros as needed. */
5533 if (outer)
5534 outer->byte = local.byte;
5535 else
5536 {
5537 if (local.byte_buffer_in_use)
5538 {
5539 assemble_integer (GEN_INT (local.byte), 1, BITS_PER_UNIT, 1);
5540 local.total_bytes++;
5541 }
5542
5543 if ((unsigned HOST_WIDE_INT)local.total_bytes < local.size)
5544 {
5545 assemble_zeros (local.size - local.total_bytes);
5546 local.total_bytes = local.size;
5547 }
5548 }
5549
5550 return local.total_bytes;
5551 }
5552
5553 /* Mark DECL as weak. */
5554
5555 static void
5556 mark_weak (tree decl)
5557 {
5558 if (DECL_WEAK (decl))
5559 return;
5560
5561 struct symtab_node *n = symtab_node::get (decl);
5562 if (n && n->refuse_visibility_changes)
5563 error ("%+qD declared weak after being used", decl);
5564 DECL_WEAK (decl) = 1;
5565
5566 if (DECL_RTL_SET_P (decl)
5567 && MEM_P (DECL_RTL (decl))
5568 && XEXP (DECL_RTL (decl), 0)
5569 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == SYMBOL_REF)
5570 SYMBOL_REF_WEAK (XEXP (DECL_RTL (decl), 0)) = 1;
5571 }
5572
5573 /* Merge weak status between NEWDECL and OLDDECL. */
5574
5575 void
5576 merge_weak (tree newdecl, tree olddecl)
5577 {
5578 if (DECL_WEAK (newdecl) == DECL_WEAK (olddecl))
5579 {
5580 if (DECL_WEAK (newdecl) && TARGET_SUPPORTS_WEAK)
5581 {
5582 tree *pwd;
5583 /* We put the NEWDECL on the weak_decls list at some point
5584 and OLDDECL as well. Keep just OLDDECL on the list. */
5585 for (pwd = &weak_decls; *pwd; pwd = &TREE_CHAIN (*pwd))
5586 if (TREE_VALUE (*pwd) == newdecl)
5587 {
5588 *pwd = TREE_CHAIN (*pwd);
5589 break;
5590 }
5591 }
5592 return;
5593 }
5594
5595 if (DECL_WEAK (newdecl))
5596 {
5597 tree wd;
5598
5599 /* NEWDECL is weak, but OLDDECL is not. */
5600
5601 /* If we already output the OLDDECL, we're in trouble; we can't
5602 go back and make it weak. This should never happen in
5603 unit-at-a-time compilation. */
5604 gcc_assert (!TREE_ASM_WRITTEN (olddecl));
5605
5606 /* If we've already generated rtl referencing OLDDECL, we may
5607 have done so in a way that will not function properly with
5608 a weak symbol. Again in unit-at-a-time this should be
5609 impossible. */
5610 gcc_assert (!TREE_USED (olddecl)
5611 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (olddecl)));
5612
5613 /* PR 49899: You cannot convert a static function into a weak, public function. */
5614 if (! TREE_PUBLIC (olddecl) && TREE_PUBLIC (newdecl))
5615 error ("weak declaration of %q+D being applied to a already "
5616 "existing, static definition", newdecl);
5617
5618 if (TARGET_SUPPORTS_WEAK)
5619 {
5620 /* We put the NEWDECL on the weak_decls list at some point.
5621 Replace it with the OLDDECL. */
5622 for (wd = weak_decls; wd; wd = TREE_CHAIN (wd))
5623 if (TREE_VALUE (wd) == newdecl)
5624 {
5625 TREE_VALUE (wd) = olddecl;
5626 break;
5627 }
5628 /* We may not find the entry on the list. If NEWDECL is a
5629 weak alias, then we will have already called
5630 globalize_decl to remove the entry; in that case, we do
5631 not need to do anything. */
5632 }
5633
5634 /* Make the OLDDECL weak; it's OLDDECL that we'll be keeping. */
5635 mark_weak (olddecl);
5636 }
5637 else
5638 /* OLDDECL was weak, but NEWDECL was not explicitly marked as
5639 weak. Just update NEWDECL to indicate that it's weak too. */
5640 mark_weak (newdecl);
5641 }
5642
5643 /* Declare DECL to be a weak symbol. */
5644
5645 void
5646 declare_weak (tree decl)
5647 {
5648 gcc_assert (TREE_CODE (decl) != FUNCTION_DECL || !TREE_ASM_WRITTEN (decl));
5649 if (! TREE_PUBLIC (decl))
5650 {
5651 error ("weak declaration of %q+D must be public", decl);
5652 return;
5653 }
5654 else if (!TARGET_SUPPORTS_WEAK)
5655 warning (0, "weak declaration of %q+D not supported", decl);
5656
5657 mark_weak (decl);
5658 if (!lookup_attribute ("weak", DECL_ATTRIBUTES (decl)))
5659 DECL_ATTRIBUTES (decl)
5660 = tree_cons (get_identifier ("weak"), NULL, DECL_ATTRIBUTES (decl));
5661 }
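
/* As an illustration, on a typical ELF/GAS target (which has
   TARGET_SUPPORTS_WEAK and ASM_WEAKEN_LABEL), either of

       extern void logger (const char *) __attribute__ ((weak));
       #pragma weak logger

   reaches declare_weak above; the decl is marked DECL_WEAK and the
   assembly output eventually contains a ".weak logger" directive, so
   an unresolved reference to it yields a zero address at link time
   instead of an error.  */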
5662
5663 static void
5664 weak_finish_1 (tree decl)
5665 {
5666 #if defined (ASM_WEAKEN_DECL) || defined (ASM_WEAKEN_LABEL)
5667 const char *const name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
5668 #endif
5669
5670 if (! TREE_USED (decl))
5671 return;
5672
5673 #ifdef ASM_WEAKEN_DECL
5674 ASM_WEAKEN_DECL (asm_out_file, decl, name, NULL);
5675 #else
5676 #ifdef ASM_WEAKEN_LABEL
5677 ASM_WEAKEN_LABEL (asm_out_file, name);
5678 #else
5679 #ifdef ASM_OUTPUT_WEAK_ALIAS
5680 {
5681 static bool warn_once = 0;
5682 if (! warn_once)
5683 {
5684 warning (0, "only weak aliases are supported in this configuration");
5685 warn_once = 1;
5686 }
5687 return;
5688 }
5689 #endif
5690 #endif
5691 #endif
5692 }
5693
5694 /* Given an assembly name, find the decl it is associated with. */
5695 static tree
5696 find_decl (tree target)
5697 {
5698 symtab_node *node = symtab_node::get_for_asmname (target);
5699 if (node)
5700 return node->decl;
5701 return NULL_TREE;
5702 }
5703
5704 /* This TREE_LIST contains weakref targets. */
5705
5706 static GTY(()) tree weakref_targets;
5707
5708 /* Emit any pending weak declarations. */
5709
5710 void
5711 weak_finish (void)
5712 {
5713 tree t;
5714
5715 for (t = weakref_targets; t; t = TREE_CHAIN (t))
5716 {
5717 tree alias_decl = TREE_PURPOSE (t);
5718 tree target = ultimate_transparent_alias_target (&TREE_VALUE (t));
5719
5720 if (! TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (alias_decl))
5721 || TREE_SYMBOL_REFERENCED (target))
5722 /* Remove alias_decl from the weak list, but leave entries for
5723 the target alone. */
5724 target = NULL_TREE;
5725 #ifndef ASM_OUTPUT_WEAKREF
5726 else if (! TREE_SYMBOL_REFERENCED (target))
5727 {
5728 /* Use ASM_WEAKEN_LABEL only if ASM_WEAKEN_DECL is not
5729 defined, otherwise we and weak_finish_1 would use
5730 different macros. */
5731 # if defined ASM_WEAKEN_LABEL && ! defined ASM_WEAKEN_DECL
5732 ASM_WEAKEN_LABEL (asm_out_file, IDENTIFIER_POINTER (target));
5733 # else
5734 tree decl = find_decl (target);
5735
5736 if (! decl)
5737 {
5738 decl = build_decl (DECL_SOURCE_LOCATION (alias_decl),
5739 TREE_CODE (alias_decl), target,
5740 TREE_TYPE (alias_decl));
5741
5742 DECL_EXTERNAL (decl) = 1;
5743 TREE_PUBLIC (decl) = 1;
5744 DECL_ARTIFICIAL (decl) = 1;
5745 TREE_NOTHROW (decl) = TREE_NOTHROW (alias_decl);
5746 TREE_USED (decl) = 1;
5747 }
5748
5749 weak_finish_1 (decl);
5750 # endif
5751 }
5752 #endif
5753
5754 {
5755 tree *p;
5756 tree t2;
5757
5758 /* Remove the alias and the target from the pending weak list
5759 so that we do not emit any .weak directives for the former,
5760 nor multiple .weak directives for the latter. */
5761 for (p = &weak_decls; (t2 = *p) ; )
5762 {
5763 if (TREE_VALUE (t2) == alias_decl
5764 || target == DECL_ASSEMBLER_NAME (TREE_VALUE (t2)))
5765 *p = TREE_CHAIN (t2);
5766 else
5767 p = &TREE_CHAIN (t2);
5768 }
5769
5770 /* Remove other weakrefs to the same target, to speed things up. */
5771 for (p = &TREE_CHAIN (t); (t2 = *p) ; )
5772 {
5773 if (target == ultimate_transparent_alias_target (&TREE_VALUE (t2)))
5774 *p = TREE_CHAIN (t2);
5775 else
5776 p = &TREE_CHAIN (t2);
5777 }
5778 }
5779 }
5780
5781 for (t = weak_decls; t; t = TREE_CHAIN (t))
5782 {
5783 tree decl = TREE_VALUE (t);
5784
5785 weak_finish_1 (decl);
5786 }
5787 }
5788
5789 /* Emit the assembly bits to indicate that DECL is globally visible. */
5790
5791 static void
5792 globalize_decl (tree decl)
5793 {
5794
5795 #if defined (ASM_WEAKEN_LABEL) || defined (ASM_WEAKEN_DECL)
5796 if (DECL_WEAK (decl))
5797 {
5798 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
5799 tree *p, t;
5800
5801 #ifdef ASM_WEAKEN_DECL
5802 ASM_WEAKEN_DECL (asm_out_file, decl, name, 0);
5803 #else
5804 ASM_WEAKEN_LABEL (asm_out_file, name);
5805 #endif
5806
5807 /* Remove this function from the pending weak list so that
5808 we do not emit multiple .weak directives for it. */
5809 for (p = &weak_decls; (t = *p) ; )
5810 {
5811 if (DECL_ASSEMBLER_NAME (decl) == DECL_ASSEMBLER_NAME (TREE_VALUE (t)))
5812 *p = TREE_CHAIN (t);
5813 else
5814 p = &TREE_CHAIN (t);
5815 }
5816
5817 /* Remove weakrefs to the same target from the pending weakref
5818 list, for the same reason. */
5819 for (p = &weakref_targets; (t = *p) ; )
5820 {
5821 if (DECL_ASSEMBLER_NAME (decl)
5822 == ultimate_transparent_alias_target (&TREE_VALUE (t)))
5823 *p = TREE_CHAIN (t);
5824 else
5825 p = &TREE_CHAIN (t);
5826 }
5827
5828 return;
5829 }
5830 #endif
5831
5832 targetm.asm_out.globalize_decl_name (asm_out_file, decl);
5833 }
5834
5835 vec<alias_pair, va_gc> *alias_pairs;
5836
5837 /* Output the assembler code for a define (equate) using ASM_OUTPUT_DEF
5838 or ASM_OUTPUT_DEF_FROM_DECLS. The function defines the symbol whose
5839 tree node is DECL to have the value of the tree node TARGET. */
5840
5841 void
5842 do_assemble_alias (tree decl, tree target)
5843 {
5844 tree id;
5845
5846 /* Emulated TLS had better not get this var. */
5847 gcc_assert (!(!targetm.have_tls
5848 && VAR_P (decl)
5849 && DECL_THREAD_LOCAL_P (decl)));
5850
5851 if (TREE_ASM_WRITTEN (decl))
5852 return;
5853
5854 id = DECL_ASSEMBLER_NAME (decl);
5855 ultimate_transparent_alias_target (&id);
5856 ultimate_transparent_alias_target (&target);
5857
5858 /* We must force creation of DECL_RTL for debug info generation, even though
5859 we don't use it here. */
5860 make_decl_rtl (decl);
5861
5862 TREE_ASM_WRITTEN (decl) = 1;
5863 TREE_ASM_WRITTEN (DECL_ASSEMBLER_NAME (decl)) = 1;
5864 TREE_ASM_WRITTEN (id) = 1;
5865
5866 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
5867 {
5868 if (!TREE_SYMBOL_REFERENCED (target))
5869 weakref_targets = tree_cons (decl, target, weakref_targets);
5870
5871 #ifdef ASM_OUTPUT_WEAKREF
5872 ASM_OUTPUT_WEAKREF (asm_out_file, decl,
5873 IDENTIFIER_POINTER (id),
5874 IDENTIFIER_POINTER (target));
5875 #else
5876 if (!TARGET_SUPPORTS_WEAK)
5877 {
5878 error_at (DECL_SOURCE_LOCATION (decl),
5879 "weakref is not supported in this configuration");
5880 return;
5881 }
5882 #endif
5883 return;
5884 }
5885
5886 #ifdef ASM_OUTPUT_DEF
5887 tree orig_decl = decl;
5888
5889 /* Make name accessible from other files, if appropriate. */
5890
5891 if (TREE_PUBLIC (decl) || TREE_PUBLIC (orig_decl))
5892 {
5893 globalize_decl (decl);
5894 maybe_assemble_visibility (decl);
5895 }
5896 if (TREE_CODE (decl) == FUNCTION_DECL
5897 && cgraph_node::get (decl)->ifunc_resolver)
5898 {
5899 #if defined (ASM_OUTPUT_TYPE_DIRECTIVE)
5900 if (targetm.has_ifunc_p ())
5901 ASM_OUTPUT_TYPE_DIRECTIVE
5902 (asm_out_file, IDENTIFIER_POINTER (id),
5903 IFUNC_ASM_TYPE);
5904 else
5905 #endif
5906 error_at (DECL_SOURCE_LOCATION (decl),
5907 "%qs is not supported on this target", "ifunc");
5908 }
5909
5910 # ifdef ASM_OUTPUT_DEF_FROM_DECLS
5911 ASM_OUTPUT_DEF_FROM_DECLS (asm_out_file, decl, target);
5912 # else
5913 ASM_OUTPUT_DEF (asm_out_file,
5914 IDENTIFIER_POINTER (id),
5915 IDENTIFIER_POINTER (target));
5916 # endif
5917 #elif defined (ASM_OUTPUT_WEAK_ALIAS) || defined (ASM_WEAKEN_DECL)
5918 {
5919 const char *name;
5920 tree *p, t;
5921
5922 name = IDENTIFIER_POINTER (id);
5923 # ifdef ASM_WEAKEN_DECL
5924 ASM_WEAKEN_DECL (asm_out_file, decl, name, IDENTIFIER_POINTER (target));
5925 # else
5926 ASM_OUTPUT_WEAK_ALIAS (asm_out_file, name, IDENTIFIER_POINTER (target));
5927 # endif
5928 /* Remove this function from the pending weak list so that
5929 we do not emit multiple .weak directives for it. */
5930 for (p = &weak_decls; (t = *p) ; )
5931 if (DECL_ASSEMBLER_NAME (decl) == DECL_ASSEMBLER_NAME (TREE_VALUE (t))
5932 || id == DECL_ASSEMBLER_NAME (TREE_VALUE (t)))
5933 *p = TREE_CHAIN (t);
5934 else
5935 p = &TREE_CHAIN (t);
5936
5937 /* Remove weakrefs to the same target from the pending weakref
5938 list, for the same reason. */
5939 for (p = &weakref_targets; (t = *p) ; )
5940 {
5941 if (id == ultimate_transparent_alias_target (&TREE_VALUE (t)))
5942 *p = TREE_CHAIN (t);
5943 else
5944 p = &TREE_CHAIN (t);
5945 }
5946 }
5947 #endif
5948 }
5949
5950 /* Emit an assembler directive to make the symbol for DECL an alias to
5951 the symbol for TARGET. */
5952
5953 void
5954 assemble_alias (tree decl, tree target)
5955 {
5956 tree target_decl;
5957
5958 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
5959 {
5960 tree alias = DECL_ASSEMBLER_NAME (decl);
5961
5962 ultimate_transparent_alias_target (&target);
5963
5964 if (alias == target)
5965 error ("%qs symbol %q+D ultimately targets itself", "weakref", decl);
5966 if (TREE_PUBLIC (decl))
5967 error ("%qs symbol %q+D must have static linkage", "weakref", decl);
5968 }
5969 else
5970 {
5971 #if !defined (ASM_OUTPUT_DEF)
5972 # if !defined(ASM_OUTPUT_WEAK_ALIAS) && !defined (ASM_WEAKEN_DECL)
5973 error_at (DECL_SOURCE_LOCATION (decl),
5974 "alias definitions not supported in this configuration");
5975 TREE_ASM_WRITTEN (decl) = 1;
5976 return;
5977 # else
5978 if (!DECL_WEAK (decl))
5979 {
5980 /* NB: ifunc_resolver isn't set when an error is detected. */
5981 if (TREE_CODE (decl) == FUNCTION_DECL
5982 && lookup_attribute ("ifunc", DECL_ATTRIBUTES (decl)))
5983 error_at (DECL_SOURCE_LOCATION (decl),
5984 "%qs is not supported in this configuration", "ifunc");
5985 else
5986 error_at (DECL_SOURCE_LOCATION (decl),
5987 "only weak aliases are supported in this configuration");
5988 TREE_ASM_WRITTEN (decl) = 1;
5989 return;
5990 }
5991 # endif
5992 #endif
5993 }
5994 TREE_USED (decl) = 1;
5995
5996 /* Allow aliases to aliases. */
5997 if (TREE_CODE (decl) == FUNCTION_DECL)
5998 cgraph_node::get_create (decl)->alias = true;
5999 else
6000 varpool_node::get_create (decl)->alias = true;
6001
6002 /* If the target has already been emitted, we don't have to queue the
6003 alias. This saves a tad of memory. */
6004 if (symtab->global_info_ready)
6005 target_decl = find_decl (target);
6006 else
6007 target_decl = NULL;
6008 if ((target_decl && TREE_ASM_WRITTEN (target_decl))
6009 || symtab->state >= EXPANSION)
6010 do_assemble_alias (decl, target);
6011 else
6012 {
6013 alias_pair p = {decl, target};
6014 vec_safe_push (alias_pairs, p);
6015 }
6016 }
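
/* For example, on an ELF target with ASM_OUTPUT_DEF,

       int impl (void) { return 0; }
       int shim (void) __attribute__ ((alias ("impl")));

   ends up in do_assemble_alias above (directly or via the alias_pairs
   queue), which globalizes "shim" since it is public and then emits a
   definition equating the two symbols, typically rendered by the
   target macro as ".set shim, impl" or "shim = impl".  A weakref such
   as

       static int helper (void) __attribute__ ((weakref ("ext_impl")));

   instead produces a ".weakref helper, ext_impl" directive where
   ASM_OUTPUT_WEAKREF is available.  */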
6017
6018 /* Record and output a table of translations from original function
6019 to its transaction aware clone. Note that tm_pure functions are
6020 considered to be their own clone. */
6021
6022 struct tm_clone_hasher : ggc_cache_ptr_hash<tree_map>
6023 {
6024 static hashval_t hash (tree_map *m) { return tree_map_hash (m); }
6025 static bool equal (tree_map *a, tree_map *b) { return tree_map_eq (a, b); }
6026
6027 static int
6028 keep_cache_entry (tree_map *&e)
6029 {
6030 return ggc_marked_p (e->base.from);
6031 }
6032 };
6033
6034 static GTY((cache)) hash_table<tm_clone_hasher> *tm_clone_hash;
6035
6036 void
6037 record_tm_clone_pair (tree o, tree n)
6038 {
6039 struct tree_map **slot, *h;
6040
6041 if (tm_clone_hash == NULL)
6042 tm_clone_hash = hash_table<tm_clone_hasher>::create_ggc (32);
6043
6044 h = ggc_alloc<tree_map> ();
6045 h->hash = htab_hash_pointer (o);
6046 h->base.from = o;
6047 h->to = n;
6048
6049 slot = tm_clone_hash->find_slot_with_hash (h, h->hash, INSERT);
6050 *slot = h;
6051 }
6052
6053 tree
6054 get_tm_clone_pair (tree o)
6055 {
6056 if (tm_clone_hash)
6057 {
6058 struct tree_map *h, in;
6059
6060 in.base.from = o;
6061 in.hash = htab_hash_pointer (o);
6062 h = tm_clone_hash->find_with_hash (&in, in.hash);
6063 if (h)
6064 return h->to;
6065 }
6066 return NULL_TREE;
6067 }
6068
6069 struct tm_alias_pair
6070 {
6071 unsigned int uid;
6072 tree from;
6073 tree to;
6074 };
6075
6076
6077 /* Dump the actual pairs to the .tm_clone_table section. */
6078
6079 static void
6080 dump_tm_clone_pairs (vec<tm_alias_pair> tm_alias_pairs)
6081 {
6082 unsigned i;
6083 tm_alias_pair *p;
6084 bool switched = false;
6085
6086 FOR_EACH_VEC_ELT (tm_alias_pairs, i, p)
6087 {
6088 tree src = p->from;
6089 tree dst = p->to;
6090 struct cgraph_node *src_n = cgraph_node::get (src);
6091 struct cgraph_node *dst_n = cgraph_node::get (dst);
6092
6093 /* The function ipa_tm_create_version() marks the clone as needed if
6094 the original function was needed. But we also mark the clone as
6095 needed if we ever called the clone indirectly through
6096 TM_GETTMCLONE. If neither of these are true, we didn't generate
6097 a clone, and we didn't call it indirectly... no sense keeping it
6098 in the clone table. */
6099 if (!dst_n || !dst_n->definition)
6100 continue;
6101
6102 /* This covers the case where we have optimized the original
6103 function away, and only access the transactional clone. */
6104 if (!src_n || !src_n->definition)
6105 continue;
6106
6107 if (!switched)
6108 {
6109 switch_to_section (targetm.asm_out.tm_clone_table_section ());
6110 assemble_align (POINTER_SIZE);
6111 switched = true;
6112 }
6113
6114 assemble_integer (XEXP (DECL_RTL (src), 0),
6115 POINTER_SIZE_UNITS, POINTER_SIZE, 1);
6116 assemble_integer (XEXP (DECL_RTL (dst), 0),
6117 POINTER_SIZE_UNITS, POINTER_SIZE, 1);
6118 }
6119 }
6120
6121 /* Provide a default for the tm_clone_table section. */
6122
6123 section *
6124 default_clone_table_section (void)
6125 {
6126 return get_named_section (NULL, ".tm_clone_table", 3);
6127 }
6128
6129 /* Helper comparison function for qsorting by the DECL_UID stored in
6130 tm_alias_pair->uid. */
6131
6132 static int
6133 tm_alias_pair_cmp (const void *x, const void *y)
6134 {
6135 const tm_alias_pair *p1 = (const tm_alias_pair *) x;
6136 const tm_alias_pair *p2 = (const tm_alias_pair *) y;
6137 if (p1->uid < p2->uid)
6138 return -1;
6139 if (p1->uid > p2->uid)
6140 return 1;
6141 return 0;
6142 }
6143
6144 void
6145 finish_tm_clone_pairs (void)
6146 {
6147 vec<tm_alias_pair> tm_alias_pairs = vNULL;
6148
6149 if (tm_clone_hash == NULL)
6150 return;
6151
6152 /* We need a deterministic order for the .tm_clone_table, otherwise
6153 we will get bootstrap comparison failures, so dump the hash table
6154 to a vector, sort it, and dump the vector. */
6155
6156 /* Dump the hashtable to a vector. */
6157 tree_map *map;
6158 hash_table<tm_clone_hasher>::iterator iter;
6159 FOR_EACH_HASH_TABLE_ELEMENT (*tm_clone_hash, map, tree_map *, iter)
6160 {
6161 tm_alias_pair p = {DECL_UID (map->base.from), map->base.from, map->to};
6162 tm_alias_pairs.safe_push (p);
6163 }
6164 /* Sort it. */
6165 tm_alias_pairs.qsort (tm_alias_pair_cmp);
6166
6167 /* Dump it. */
6168 dump_tm_clone_pairs (tm_alias_pairs);
6169
6170 tm_clone_hash->empty ();
6171 tm_clone_hash = NULL;
6172 tm_alias_pairs.release ();
6173 }
6174
6175
6176 /* Emit an assembler directive to set symbol for DECL visibility to
6177 the visibility type VIS, which must not be VISIBILITY_DEFAULT. */
6178
6179 void
6180 default_assemble_visibility (tree decl ATTRIBUTE_UNUSED,
6181 int vis ATTRIBUTE_UNUSED)
6182 {
6183 #ifdef HAVE_GAS_HIDDEN
6184 static const char * const visibility_types[] = {
6185 NULL, "protected", "hidden", "internal"
6186 };
6187
6188 const char *name, *type;
6189 tree id;
6190
6191 id = DECL_ASSEMBLER_NAME (decl);
6192 ultimate_transparent_alias_target (&id);
6193 name = IDENTIFIER_POINTER (id);
6194
6195 type = visibility_types[vis];
6196
6197 fprintf (asm_out_file, "\t.%s\t", type);
6198 assemble_name (asm_out_file, name);
6199 fprintf (asm_out_file, "\n");
6200 #else
6201 if (!DECL_ARTIFICIAL (decl))
6202 warning (OPT_Wattributes, "visibility attribute not supported "
6203 "in this configuration; ignored");
6204 #endif
6205 }
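
/* For instance, when HAVE_GAS_HIDDEN is defined,

       __attribute__ ((visibility ("hidden"))) int quiet_counter;

   causes maybe_assemble_visibility (below) to invoke the hook above,
   and the output is simply

       .hidden quiet_counter

   with "protected" and "internal" handled the same way through the
   visibility_types table.  */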
6206
6207 /* A helper function to call assemble_visibility when needed for a decl. */
6208
6209 int
6210 maybe_assemble_visibility (tree decl)
6211 {
6212 enum symbol_visibility vis = DECL_VISIBILITY (decl);
6213 if (vis != VISIBILITY_DEFAULT)
6214 {
6215 targetm.asm_out.assemble_visibility (decl, vis);
6216 return 1;
6217 }
6218 else
6219 return 0;
6220 }
6221
6222 /* Returns 1 if the target configuration supports defining public symbols
6223 so that one of them will be chosen at link time instead of generating a
6224 multiply-defined symbol error, whether through the use of weak symbols or
6225 a target-specific mechanism for having duplicates discarded. */
6226
6227 int
6228 supports_one_only (void)
6229 {
6230 if (SUPPORTS_ONE_ONLY)
6231 return 1;
6232 return TARGET_SUPPORTS_WEAK;
6233 }
6234
6235 /* Set up DECL as a public symbol that can be defined in multiple
6236 translation units without generating a linker error. */
6237
6238 void
6239 make_decl_one_only (tree decl, tree comdat_group)
6240 {
6241 struct symtab_node *symbol;
6242 gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));
6243
6244 TREE_PUBLIC (decl) = 1;
6245
6246 if (VAR_P (decl))
6247 symbol = varpool_node::get_create (decl);
6248 else
6249 symbol = cgraph_node::get_create (decl);
6250
6251 if (SUPPORTS_ONE_ONLY)
6252 {
6253 #ifdef MAKE_DECL_ONE_ONLY
6254 MAKE_DECL_ONE_ONLY (decl);
6255 #endif
6256 symbol->set_comdat_group (comdat_group);
6257 }
6258 else if (VAR_P (decl)
6259 && (DECL_INITIAL (decl) == 0
6260 || (!in_lto_p && DECL_INITIAL (decl) == error_mark_node)))
6261 DECL_COMMON (decl) = 1;
6262 else
6263 {
6264 gcc_assert (TARGET_SUPPORTS_WEAK);
6265 DECL_WEAK (decl) = 1;
6266 }
6267 }
6268
6269 void
6270 init_varasm_once (void)
6271 {
6272 section_htab = hash_table<section_hasher>::create_ggc (31);
6273 object_block_htab = hash_table<object_block_hasher>::create_ggc (31);
6274 const_desc_htab = hash_table<tree_descriptor_hasher>::create_ggc (1009);
6275
6276 shared_constant_pool = create_constant_pool ();
6277
6278 #ifdef TEXT_SECTION_ASM_OP
6279 text_section = get_unnamed_section (SECTION_CODE, output_section_asm_op,
6280 TEXT_SECTION_ASM_OP);
6281 #endif
6282
6283 #ifdef DATA_SECTION_ASM_OP
6284 data_section = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
6285 DATA_SECTION_ASM_OP);
6286 #endif
6287
6288 #ifdef SDATA_SECTION_ASM_OP
6289 sdata_section = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
6290 SDATA_SECTION_ASM_OP);
6291 #endif
6292
6293 #ifdef READONLY_DATA_SECTION_ASM_OP
6294 readonly_data_section = get_unnamed_section (0, output_section_asm_op,
6295 READONLY_DATA_SECTION_ASM_OP);
6296 #endif
6297
6298 #ifdef CTORS_SECTION_ASM_OP
6299 ctors_section = get_unnamed_section (0, output_section_asm_op,
6300 CTORS_SECTION_ASM_OP);
6301 #endif
6302
6303 #ifdef DTORS_SECTION_ASM_OP
6304 dtors_section = get_unnamed_section (0, output_section_asm_op,
6305 DTORS_SECTION_ASM_OP);
6306 #endif
6307
6308 #ifdef BSS_SECTION_ASM_OP
6309 bss_section = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
6310 output_section_asm_op,
6311 BSS_SECTION_ASM_OP);
6312 #endif
6313
6314 #ifdef SBSS_SECTION_ASM_OP
6315 sbss_section = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
6316 output_section_asm_op,
6317 SBSS_SECTION_ASM_OP);
6318 #endif
6319
6320 tls_comm_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS
6321 | SECTION_COMMON, emit_tls_common);
6322 lcomm_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS
6323 | SECTION_COMMON, emit_local);
6324 comm_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS
6325 | SECTION_COMMON, emit_common);
6326
6327 #if defined ASM_OUTPUT_ALIGNED_BSS
6328 bss_noswitch_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS,
6329 emit_bss);
6330 #endif
6331
6332 targetm.asm_out.init_sections ();
6333
6334 if (readonly_data_section == NULL)
6335 readonly_data_section = text_section;
6336
6337 #ifdef ASM_OUTPUT_EXTERNAL
6338 pending_assemble_externals_set = new hash_set<tree>;
6339 #endif
6340 }
6341
6342 enum tls_model
6343 decl_default_tls_model (const_tree decl)
6344 {
6345 enum tls_model kind;
6346 bool is_local;
6347
6348 is_local = targetm.binds_local_p (decl);
6349 if (!flag_shlib)
6350 {
6351 if (is_local)
6352 kind = TLS_MODEL_LOCAL_EXEC;
6353 else
6354 kind = TLS_MODEL_INITIAL_EXEC;
6355 }
6356
6357 /* Local dynamic is inefficient when we're not combining the
6358 parts of the address. */
6359 else if (optimize && is_local)
6360 kind = TLS_MODEL_LOCAL_DYNAMIC;
6361 else
6362 kind = TLS_MODEL_GLOBAL_DYNAMIC;
6363 if (kind < flag_tls_default)
6364 kind = flag_tls_default;
6365
6366 return kind;
6367 }
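
/* A sketch of the usual outcomes: for

       static __thread int cache;

   binds_local_p is true, so when flag_shlib is clear the function
   above picks TLS_MODEL_LOCAL_EXEC, and when compiling for a shared
   library with optimization it picks TLS_MODEL_LOCAL_DYNAMIC.  A plain

       extern __thread int shared_state;

   referenced from a shared library generally gets
   TLS_MODEL_GLOBAL_DYNAMIC instead, and the result is then clamped
   against flag_tls_default as above.  */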
6368
6369 /* Select a set of attributes for section NAME based on the properties
6370 of DECL and whether or not RELOC indicates that DECL's initializer
6371 might contain runtime relocations.
6372
6373 We make the section read-only and executable for a function decl,
6374 read-only for a const data decl, and writable for a non-const data decl. */
6375
6376 unsigned int
6377 default_section_type_flags (tree decl, const char *name, int reloc)
6378 {
6379 unsigned int flags;
6380
6381 if (decl && TREE_CODE (decl) == FUNCTION_DECL)
6382 flags = SECTION_CODE;
6383 else if (decl)
6384 {
6385 enum section_category category
6386 = categorize_decl_for_section (decl, reloc);
6387 if (decl_readonly_section_1 (category))
6388 flags = 0;
6389 else if (category == SECCAT_DATA_REL_RO
6390 || category == SECCAT_DATA_REL_RO_LOCAL)
6391 flags = SECTION_WRITE | SECTION_RELRO;
6392 else
6393 flags = SECTION_WRITE;
6394 }
6395 else
6396 {
6397 flags = SECTION_WRITE;
6398 if (strcmp (name, ".data.rel.ro") == 0
6399 || strcmp (name, ".data.rel.ro.local") == 0)
6400 flags |= SECTION_RELRO;
6401 }
6402
6403 if (decl && DECL_P (decl) && DECL_COMDAT_GROUP (decl))
6404 flags |= SECTION_LINKONCE;
6405
6406 if (strcmp (name, ".vtable_map_vars") == 0)
6407 flags |= SECTION_LINKONCE;
6408
6409 if (decl && VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
6410 flags |= SECTION_TLS | SECTION_WRITE;
6411
6412 if (strcmp (name, ".bss") == 0
6413 || strncmp (name, ".bss.", 5) == 0
6414 || strncmp (name, ".gnu.linkonce.b.", 16) == 0
6415 || strcmp (name, ".persistent.bss") == 0
6416 || strcmp (name, ".sbss") == 0
6417 || strncmp (name, ".sbss.", 6) == 0
6418 || strncmp (name, ".gnu.linkonce.sb.", 17) == 0)
6419 flags |= SECTION_BSS;
6420
6421 if (strcmp (name, ".tdata") == 0
6422 || strncmp (name, ".tdata.", 7) == 0
6423 || strncmp (name, ".gnu.linkonce.td.", 17) == 0)
6424 flags |= SECTION_TLS;
6425
6426 if (strcmp (name, ".tbss") == 0
6427 || strncmp (name, ".tbss.", 6) == 0
6428 || strncmp (name, ".gnu.linkonce.tb.", 17) == 0)
6429 flags |= SECTION_TLS | SECTION_BSS;
6430
6431 /* Various sections have special ELF types that the assembler will
6432 assign by default based on the name. They are neither SHT_PROGBITS
6433 nor SHT_NOBITS, so when changing sections we don't want to print a
6434 section type (@progbits or @nobits). Rather than duplicating the
6435 assembler's knowledge of what those special name patterns are, just
6436 let the assembler choose the type if we don't know a specific
6437 reason to set it to something other than the default. SHT_PROGBITS
6438 is the default for sections whose name is not specially known to
6439 the assembler, so it does no harm to leave the choice to the
6440 assembler when @progbits is the best thing we know to use. If
6441 someone is silly enough to emit code or TLS variables to one of
6442 these sections, then don't handle them specially.
6443
6444 default_elf_asm_named_section (below) handles the BSS, TLS, ENTSIZE, and
6445 LINKONCE cases when NOTYPE is not set, so leave those to its logic. */
6446 if (!(flags & (SECTION_CODE | SECTION_BSS | SECTION_TLS | SECTION_ENTSIZE))
6447 && !(HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE)))
6448 flags |= SECTION_NOTYPE;
6449
6450 return flags;
6451 }
6452
6453 /* Return true if the target supports some form of global BSS,
6454 either through bss_noswitch_section, or by selecting a BSS
6455 section in TARGET_ASM_SELECT_SECTION. */
6456
6457 bool
6458 have_global_bss_p (void)
6459 {
6460 return bss_noswitch_section || targetm.have_switchable_bss_sections;
6461 }
6462
6463 /* Output assembly to switch to section NAME with attribute FLAGS.
6464 Four variants for common object file formats. */
6465
6466 void
6467 default_no_named_section (const char *name ATTRIBUTE_UNUSED,
6468 unsigned int flags ATTRIBUTE_UNUSED,
6469 tree decl ATTRIBUTE_UNUSED)
6470 {
6471 /* Some object formats don't support named sections at all. The
6472 front-end should already have flagged this as an error. */
6473 gcc_unreachable ();
6474 }
6475
6476 #ifndef TLS_SECTION_ASM_FLAG
6477 #define TLS_SECTION_ASM_FLAG 'T'
6478 #endif
6479
6480 void
6481 default_elf_asm_named_section (const char *name, unsigned int flags,
6482 tree decl)
6483 {
6484 char flagchars[11], *f = flagchars;
6485 unsigned int numeric_value = 0;
6486
6487 /* If we have already declared this section, we can use an
6488 abbreviated form to switch back to it -- unless this section is
6489 part of a COMDAT group, in which case GAS requires the full
6490 declaration every time. */
6491 if (!(HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
6492 && (flags & SECTION_DECLARED))
6493 {
6494 fprintf (asm_out_file, "\t.section\t%s\n", name);
6495 return;
6496 }
6497
6498 /* If we have a machine specific flag, then use the numeric value to pass
6499 this on to GAS. */
6500 if (targetm.asm_out.elf_flags_numeric (flags, &numeric_value))
6501 snprintf (f, sizeof (flagchars), "0x%08x", numeric_value);
6502 else
6503 {
6504 if (!(flags & SECTION_DEBUG))
6505 *f++ = 'a';
6506 #if HAVE_GAS_SECTION_EXCLUDE
6507 if (flags & SECTION_EXCLUDE)
6508 *f++ = 'e';
6509 #endif
6510 if (flags & SECTION_WRITE)
6511 *f++ = 'w';
6512 if (flags & SECTION_CODE)
6513 *f++ = 'x';
6514 if (flags & SECTION_SMALL)
6515 *f++ = 's';
6516 if (flags & SECTION_MERGE)
6517 *f++ = 'M';
6518 if (flags & SECTION_STRINGS)
6519 *f++ = 'S';
6520 if (flags & SECTION_TLS)
6521 *f++ = TLS_SECTION_ASM_FLAG;
6522 if (HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
6523 *f++ = 'G';
6524 #ifdef MACH_DEP_SECTION_ASM_FLAG
6525 if (flags & SECTION_MACH_DEP)
6526 *f++ = MACH_DEP_SECTION_ASM_FLAG;
6527 #endif
6528 *f = '\0';
6529 }
6530
6531 fprintf (asm_out_file, "\t.section\t%s,\"%s\"", name, flagchars);
6532
6533 /* default_section_type_flags (above) knows which flags need special
6534 handling here, and sets NOTYPE when none of these apply so that the
6535 assembler's logic for default types can apply to user-chosen
6536 section names. */
6537 if (!(flags & SECTION_NOTYPE))
6538 {
6539 const char *type;
6540 const char *format;
6541
6542 if (flags & SECTION_BSS)
6543 type = "nobits";
6544 else
6545 type = "progbits";
6546
6547 format = ",@%s";
6548 /* On platforms that use "@" as the assembly comment character,
6549 use "%" instead. */
6550 if (strcmp (ASM_COMMENT_START, "@") == 0)
6551 format = ",%%%s";
6552 fprintf (asm_out_file, format, type);
6553
6554 if (flags & SECTION_ENTSIZE)
6555 fprintf (asm_out_file, ",%d", flags & SECTION_ENTSIZE);
6556 if (HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
6557 {
6558 if (TREE_CODE (decl) == IDENTIFIER_NODE)
6559 fprintf (asm_out_file, ",%s,comdat", IDENTIFIER_POINTER (decl));
6560 else
6561 fprintf (asm_out_file, ",%s,comdat",
6562 IDENTIFIER_POINTER (DECL_COMDAT_GROUP (decl)));
6563 }
6564 }
6565
6566 putc ('\n', asm_out_file);
6567 }
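
/* A sketch of the directives the function above produces on a typical
   ELF/GAS target: an ordinary writable named section, whose type is
   left to the assembler (SECTION_NOTYPE), comes out as

       .section .mydata,"aw"

   a section matching one of the BSS name patterns as

       .section .bss.counter,"aw",@nobits

   and a COMDAT function section as something like

       .section .text.foo,"axG",@progbits,foo,comdat

   with "%" substituted for "@" on targets whose comment character
   is "@".  */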
6568
6569 void
6570 default_coff_asm_named_section (const char *name, unsigned int flags,
6571 tree decl ATTRIBUTE_UNUSED)
6572 {
6573 char flagchars[8], *f = flagchars;
6574
6575 if (flags & SECTION_WRITE)
6576 *f++ = 'w';
6577 if (flags & SECTION_CODE)
6578 *f++ = 'x';
6579 *f = '\0';
6580
6581 fprintf (asm_out_file, "\t.section\t%s,\"%s\"\n", name, flagchars);
6582 }
6583
6584 void
6585 default_pe_asm_named_section (const char *name, unsigned int flags,
6586 tree decl)
6587 {
6588 default_coff_asm_named_section (name, flags, decl);
6589
6590 if (flags & SECTION_LINKONCE)
6591 {
6592 /* Functions may have been compiled at various levels of
6593 optimization so we can't use `same_size' here.
6594 Instead, have the linker pick one. */
6595 fprintf (asm_out_file, "\t.linkonce %s\n",
6596 (flags & SECTION_CODE ? "discard" : "same_size"));
6597 }
6598 }
6599 \f
6600 /* The lame default section selector. */
6601
6602 section *
6603 default_select_section (tree decl, int reloc,
6604 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
6605 {
6606 if (DECL_P (decl))
6607 {
6608 if (decl_readonly_section (decl, reloc))
6609 return readonly_data_section;
6610 }
6611 else if (TREE_CODE (decl) == CONSTRUCTOR)
6612 {
6613 if (! ((flag_pic && reloc)
6614 || !TREE_READONLY (decl)
6615 || TREE_SIDE_EFFECTS (decl)
6616 || !TREE_CONSTANT (decl)))
6617 return readonly_data_section;
6618 }
6619 else if (TREE_CODE (decl) == STRING_CST)
6620 return readonly_data_section;
6621 else if (! (flag_pic && reloc))
6622 return readonly_data_section;
6623
6624 return data_section;
6625 }
6626
6627 enum section_category
6628 categorize_decl_for_section (const_tree decl, int reloc)
6629 {
6630 enum section_category ret;
6631
6632 if (TREE_CODE (decl) == FUNCTION_DECL)
6633 return SECCAT_TEXT;
6634 else if (TREE_CODE (decl) == STRING_CST)
6635 {
6636 if ((flag_sanitize & SANITIZE_ADDRESS)
6637 && asan_protect_global (CONST_CAST_TREE (decl)))
6638 /* or !flag_merge_constants */
6639 return SECCAT_RODATA;
6640 else
6641 return SECCAT_RODATA_MERGE_STR;
6642 }
6643 else if (VAR_P (decl))
6644 {
6645 tree d = CONST_CAST_TREE (decl);
6646 if (bss_initializer_p (decl))
6647 ret = SECCAT_BSS;
6648 else if (! TREE_READONLY (decl)
6649 || TREE_SIDE_EFFECTS (decl)
6650 || (DECL_INITIAL (decl)
6651 && ! TREE_CONSTANT (DECL_INITIAL (decl))))
6652 {
6653 /* Here the reloc_rw_mask is not testing whether the section should
6654 be read-only or not, but whether the dynamic linker will have to
6655 do something. If so, we wish to segregate the data in order to
6656 minimize cache misses inside the dynamic linker. */
6657 if (reloc & targetm.asm_out.reloc_rw_mask ())
6658 ret = reloc == 1 ? SECCAT_DATA_REL_LOCAL : SECCAT_DATA_REL;
6659 else
6660 ret = SECCAT_DATA;
6661 }
6662 else if (reloc & targetm.asm_out.reloc_rw_mask ())
6663 ret = reloc == 1 ? SECCAT_DATA_REL_RO_LOCAL : SECCAT_DATA_REL_RO;
6664 else if (reloc || flag_merge_constants < 2
6665 || ((flag_sanitize & SANITIZE_ADDRESS)
6666 /* PR 81697: for architectures that use section anchors we
6667 need to ignore DECL_RTL_SET_P (decl) for string constants
6668 inside this asan_protect_global call because otherwise
6669 we'll wrongly put them into SECCAT_RODATA_MERGE_CONST
6670 section, set DECL_RTL (decl) later on and add DECL to
6671 protected globals via successive asan_protect_global
6672 calls. In this scenario we'll end up with wrong
6673 alignment of these strings at runtime and possible ASan
6674 false positives. */
6675 && asan_protect_global (d, use_object_blocks_p ()
6676 && use_blocks_for_decl_p (d))))
6677 /* C and C++ don't allow different variables to share the same
6678 location. -fmerge-all-constants allows even that (at the
6679 expense of not conforming). */
6680 ret = SECCAT_RODATA;
6681 else if (DECL_INITIAL (decl)
6682 && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST)
6683 ret = SECCAT_RODATA_MERGE_STR_INIT;
6684 else
6685 ret = SECCAT_RODATA_MERGE_CONST;
6686 }
6687 else if (TREE_CODE (decl) == CONSTRUCTOR)
6688 {
6689 if ((reloc & targetm.asm_out.reloc_rw_mask ())
6690 || TREE_SIDE_EFFECTS (decl)
6691 || ! TREE_CONSTANT (decl))
6692 ret = SECCAT_DATA;
6693 else
6694 ret = SECCAT_RODATA;
6695 }
6696 else
6697 ret = SECCAT_RODATA;
6698
6699 /* There are no read-only thread-local sections. */
6700 if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
6701 {
6702 /* Note that this would be *just* SECCAT_BSS, except that there's
6703 no concept of a read-only thread-local-data section. */
6704 if (ret == SECCAT_BSS
6705 || DECL_INITIAL (decl) == NULL
6706 || (flag_zero_initialized_in_bss
6707 && initializer_zerop (DECL_INITIAL (decl))))
6708 ret = SECCAT_TBSS;
6709 else
6710 ret = SECCAT_TDATA;
6711 }
6712
6713 /* If the target uses small data sections, select it. */
6714 else if (targetm.in_small_data_p (decl))
6715 {
6716 if (ret == SECCAT_BSS)
6717 ret = SECCAT_SBSS;
6718 else if (targetm.have_srodata_section && ret == SECCAT_RODATA)
6719 ret = SECCAT_SRODATA;
6720 else
6721 ret = SECCAT_SDATA;
6722 }
6723
6724 return ret;
6725 }
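
/* A few concrete cases of the categorization above, with the default
   -fzero-initialized-in-bss and -fmerge-constants settings:

       static int hits = 0;         -> SECCAT_BSS     (RELOC == 0)
       int limit = 32;              -> SECCAT_DATA    (RELOC == 0)
       const int version = 3;       -> SECCAT_RODATA  (RELOC == 0)
       __thread int per_thread;     -> SECCAT_TBSS
       const char *const p = "x";   -> SECCAT_DATA_REL_RO_LOCAL when
                                       compiling PIC (RELOC == 1)

   Functions are always SECCAT_TEXT, and plain string constants
   default to SECCAT_RODATA_MERGE_STR.  */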
6726
6727 static bool
6728 decl_readonly_section_1 (enum section_category category)
6729 {
6730 switch (category)
6731 {
6732 case SECCAT_RODATA:
6733 case SECCAT_RODATA_MERGE_STR:
6734 case SECCAT_RODATA_MERGE_STR_INIT:
6735 case SECCAT_RODATA_MERGE_CONST:
6736 case SECCAT_SRODATA:
6737 return true;
6738 default:
6739 return false;
6740 }
6741 }
6742
6743 bool
6744 decl_readonly_section (const_tree decl, int reloc)
6745 {
6746 return decl_readonly_section_1 (categorize_decl_for_section (decl, reloc));
6747 }
6748
6749 /* Select a section based on the above categorization. */
6750
6751 section *
6752 default_elf_select_section (tree decl, int reloc,
6753 unsigned HOST_WIDE_INT align)
6754 {
6755 const char *sname;
6756 switch (categorize_decl_for_section (decl, reloc))
6757 {
6758 case SECCAT_TEXT:
6759 /* We're not supposed to be called on FUNCTION_DECLs. */
6760 gcc_unreachable ();
6761 case SECCAT_RODATA:
6762 return readonly_data_section;
6763 case SECCAT_RODATA_MERGE_STR:
6764 return mergeable_string_section (decl, align, 0);
6765 case SECCAT_RODATA_MERGE_STR_INIT:
6766 return mergeable_string_section (DECL_INITIAL (decl), align, 0);
6767 case SECCAT_RODATA_MERGE_CONST:
6768 return mergeable_constant_section (DECL_MODE (decl), align, 0);
6769 case SECCAT_SRODATA:
6770 sname = ".sdata2";
6771 break;
6772 case SECCAT_DATA:
6773 return data_section;
6774 case SECCAT_DATA_REL:
6775 sname = ".data.rel";
6776 break;
6777 case SECCAT_DATA_REL_LOCAL:
6778 sname = ".data.rel.local";
6779 break;
6780 case SECCAT_DATA_REL_RO:
6781 sname = ".data.rel.ro";
6782 break;
6783 case SECCAT_DATA_REL_RO_LOCAL:
6784 sname = ".data.rel.ro.local";
6785 break;
6786 case SECCAT_SDATA:
6787 sname = ".sdata";
6788 break;
6789 case SECCAT_TDATA:
6790 sname = ".tdata";
6791 break;
6792 case SECCAT_BSS:
6793 if (bss_section)
6794 return bss_section;
6795 sname = ".bss";
6796 break;
6797 case SECCAT_SBSS:
6798 sname = ".sbss";
6799 break;
6800 case SECCAT_TBSS:
6801 sname = ".tbss";
6802 break;
6803 default:
6804 gcc_unreachable ();
6805 }
6806
6807 return get_named_section (decl, sname, reloc);
6808 }
6809
6810 /* Construct a unique section name based on the decl name and the
6811 categorization performed above. */
6812
6813 void
6814 default_unique_section (tree decl, int reloc)
6815 {
6816 /* We only need to use .gnu.linkonce if we don't have COMDAT groups. */
6817 bool one_only = DECL_ONE_ONLY (decl) && !HAVE_COMDAT_GROUP;
6818 const char *prefix, *name, *linkonce;
6819 char *string;
6820 tree id;
6821
6822 switch (categorize_decl_for_section (decl, reloc))
6823 {
6824 case SECCAT_TEXT:
6825 prefix = one_only ? ".t" : ".text";
6826 break;
6827 case SECCAT_RODATA:
6828 case SECCAT_RODATA_MERGE_STR:
6829 case SECCAT_RODATA_MERGE_STR_INIT:
6830 case SECCAT_RODATA_MERGE_CONST:
6831 prefix = one_only ? ".r" : ".rodata";
6832 break;
6833 case SECCAT_SRODATA:
6834 prefix = one_only ? ".s2" : ".sdata2";
6835 break;
6836 case SECCAT_DATA:
6837 prefix = one_only ? ".d" : ".data";
6838 break;
6839 case SECCAT_DATA_REL:
6840 prefix = one_only ? ".d.rel" : ".data.rel";
6841 break;
6842 case SECCAT_DATA_REL_LOCAL:
6843 prefix = one_only ? ".d.rel.local" : ".data.rel.local";
6844 break;
6845 case SECCAT_DATA_REL_RO:
6846 prefix = one_only ? ".d.rel.ro" : ".data.rel.ro";
6847 break;
6848 case SECCAT_DATA_REL_RO_LOCAL:
6849 prefix = one_only ? ".d.rel.ro.local" : ".data.rel.ro.local";
6850 break;
6851 case SECCAT_SDATA:
6852 prefix = one_only ? ".s" : ".sdata";
6853 break;
6854 case SECCAT_BSS:
6855 prefix = one_only ? ".b" : ".bss";
6856 break;
6857 case SECCAT_SBSS:
6858 prefix = one_only ? ".sb" : ".sbss";
6859 break;
6860 case SECCAT_TDATA:
6861 prefix = one_only ? ".td" : ".tdata";
6862 break;
6863 case SECCAT_TBSS:
6864 prefix = one_only ? ".tb" : ".tbss";
6865 break;
6866 default:
6867 gcc_unreachable ();
6868 }
6869
6870 id = DECL_ASSEMBLER_NAME (decl);
6871 ultimate_transparent_alias_target (&id);
6872 name = IDENTIFIER_POINTER (id);
6873 name = targetm.strip_name_encoding (name);
6874
6875 /* If we're using one_only, then there needs to be a .gnu.linkonce
6876 prefix to the section name. */
6877 linkonce = one_only ? ".gnu.linkonce" : "";
6878
6879 string = ACONCAT ((linkonce, prefix, ".", name, NULL));
6880
6881 set_decl_section_name (decl, string);
6882 }
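
/* So with -ffunction-sections / -fdata-sections on an ELF target with
   COMDAT group support,

       int counter = 1;
       int get (void) { return counter; }

   are given the section names ".data.counter" and ".text.get", while
   a one-only decl on a target without COMDAT groups would instead use
   ".gnu.linkonce.d.counter" or ".gnu.linkonce.t.get".  */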
6883
6884 /* Subroutine of compute_reloc_for_rtx for leaf rtxes. */
6885
6886 static int
6887 compute_reloc_for_rtx_1 (const_rtx x)
6888 {
6889 switch (GET_CODE (x))
6890 {
6891 case SYMBOL_REF:
6892 return SYMBOL_REF_LOCAL_P (x) ? 1 : 2;
6893 case LABEL_REF:
6894 return 1;
6895 default:
6896 return 0;
6897 }
6898 }
6899
6900 /* Like compute_reloc_for_constant, except for an RTX. The return value
6901 is a mask for which bit 1 indicates a global relocation, and bit 0
6902 indicates a local relocation. */
6903
6904 static int
6905 compute_reloc_for_rtx (const_rtx x)
6906 {
6907 switch (GET_CODE (x))
6908 {
6909 case SYMBOL_REF:
6910 case LABEL_REF:
6911 return compute_reloc_for_rtx_1 (x);
6912
6913 case CONST:
6914 {
6915 int reloc = 0;
6916 subrtx_iterator::array_type array;
6917 FOR_EACH_SUBRTX (iter, array, x, ALL)
6918 reloc |= compute_reloc_for_rtx_1 (*iter);
6919 return reloc;
6920 }
6921
6922 default:
6923 return 0;
6924 }
6925 }
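
/* For instance, a constant pool entry holding "&global_var + 8" is

       (const (plus (symbol_ref "global_var") (const_int 8)))

   and the walk above reduces it to 2 when global_var may be resolved
   to another module's definition, or 1 when the SYMBOL_REF is known
   to bind locally.  */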
6926
6927 section *
6928 default_select_rtx_section (machine_mode mode ATTRIBUTE_UNUSED,
6929 rtx x,
6930 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
6931 {
6932 if (compute_reloc_for_rtx (x) & targetm.asm_out.reloc_rw_mask ())
6933 return data_section;
6934 else
6935 return readonly_data_section;
6936 }
6937
6938 section *
6939 default_elf_select_rtx_section (machine_mode mode, rtx x,
6940 unsigned HOST_WIDE_INT align)
6941 {
6942 int reloc = compute_reloc_for_rtx (x);
6943
6944 /* ??? Handle small data here somehow. */
6945
6946 if (reloc & targetm.asm_out.reloc_rw_mask ())
6947 {
6948 if (reloc == 1)
6949 return get_named_section (NULL, ".data.rel.ro.local", 1);
6950 else
6951 return get_named_section (NULL, ".data.rel.ro", 3);
6952 }
6953
6954 return mergeable_constant_section (mode, align, 0);
6955 }
6956
6957 /* Set the generally applicable flags on the SYMBOL_REF for EXP. */
6958
6959 void
6960 default_encode_section_info (tree decl, rtx rtl, int first ATTRIBUTE_UNUSED)
6961 {
6962 rtx symbol;
6963 int flags;
6964
6965 /* Careful not to prod global register variables. */
6966 if (!MEM_P (rtl))
6967 return;
6968 symbol = XEXP (rtl, 0);
6969 if (GET_CODE (symbol) != SYMBOL_REF)
6970 return;
6971
6972 flags = SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_HAS_BLOCK_INFO;
6973 if (TREE_CODE (decl) == FUNCTION_DECL)
6974 flags |= SYMBOL_FLAG_FUNCTION;
6975 if (targetm.binds_local_p (decl))
6976 flags |= SYMBOL_FLAG_LOCAL;
6977 if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
6978 flags |= DECL_TLS_MODEL (decl) << SYMBOL_FLAG_TLS_SHIFT;
6979 else if (targetm.in_small_data_p (decl))
6980 flags |= SYMBOL_FLAG_SMALL;
6981 /* ??? Why is DECL_EXTERNAL ever set for non-PUBLIC names? Without
6982 being PUBLIC, the thing *must* be defined in this translation unit.
6983 Prevent this buglet from being propagated into rtl code as well. */
6984 if (DECL_P (decl) && DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
6985 flags |= SYMBOL_FLAG_EXTERNAL;
6986
6987 SYMBOL_REF_FLAGS (symbol) = flags;
6988 }
6989
6990 /* By default, we do nothing for encode_section_info, so we need not
6991 do anything but discard the '*' marker. */
6992
6993 const char *
6994 default_strip_name_encoding (const char *str)
6995 {
6996 return str + (*str == '*');
6997 }
6998
6999 #ifdef ASM_OUTPUT_DEF
7000 /* The default implementation of TARGET_ASM_OUTPUT_ANCHOR. Define the
7001 anchor relative to ".", the current section position. */
7002
7003 void
7004 default_asm_output_anchor (rtx symbol)
7005 {
7006 char buffer[100];
7007
7008 sprintf (buffer, "*. + " HOST_WIDE_INT_PRINT_DEC,
7009 SYMBOL_REF_BLOCK_OFFSET (symbol));
7010 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
7011 }
7012 #endif
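
/* On targets that use section anchors, the definition emitted above
   typically shows up in the assembly as something like

       .set .LANCHOR0, . + 0

   i.e. the anchor label is defined at a fixed offset from the current
   position within its object block.  */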
7013
7014 /* The default implementation of TARGET_USE_ANCHORS_FOR_SYMBOL_P. */
7015
7016 bool
7017 default_use_anchors_for_symbol_p (const_rtx symbol)
7018 {
7019 tree decl;
7020 section *sect = SYMBOL_REF_BLOCK (symbol)->sect;
7021
7022 /* This function should only be called with a non-null SYMBOL_REF_BLOCK;
7023 furthermore, get_block_for_section should not create object blocks
7024 for mergeable sections. */
7025 gcc_checking_assert (sect && !(sect->common.flags & SECTION_MERGE));
7026
7027 /* Don't use anchors for small data sections. The small data register
7028 acts as an anchor for such sections. */
7029 if (sect->common.flags & SECTION_SMALL)
7030 return false;
7031
7032 decl = SYMBOL_REF_DECL (symbol);
7033 if (decl && DECL_P (decl))
7034 {
7035 /* Don't use section anchors for decls that might be defined or
7036 usurped by other modules. */
7037 if (TREE_PUBLIC (decl) && !decl_binds_to_current_def_p (decl))
7038 return false;
7039
7040 /* Don't use section anchors for decls that will be placed in a
7041 small data section. */
7042 /* ??? Ideally, this check would be redundant with the SECTION_SMALL
7043 one above. The problem is that we only use SECTION_SMALL for
7044 sections that should be marked as small in the section directive. */
7045 if (targetm.in_small_data_p (decl))
7046 return false;
7047
7048 /* Don't use section anchors for decls that won't fit inside a single
7049 anchor range, to reduce the number of instructions required to refer
7050 to the entire declaration. */
7051 if (DECL_SIZE_UNIT (decl) == NULL_TREE
7052 || !tree_fits_uhwi_p (DECL_SIZE_UNIT (decl))
7053 || (tree_to_uhwi (DECL_SIZE_UNIT (decl))
7054 >= (unsigned HOST_WIDE_INT) targetm.max_anchor_offset))
7055 return false;
7056
7057 }
7058 return true;
7059 }
7060
7061 /* Return true when RESOLUTION indicates that the symbol will be bound to the
7062 definition provided by the current .o file. */
7063
7064 static bool
7065 resolution_to_local_definition_p (enum ld_plugin_symbol_resolution resolution)
7066 {
7067 return (resolution == LDPR_PREVAILING_DEF
7068 || resolution == LDPR_PREVAILING_DEF_IRONLY_EXP
7069 || resolution == LDPR_PREVAILING_DEF_IRONLY);
7070 }
7071
7072 /* Return true when RESOLUTION indicates that the symbol will be bound locally
7073 within the current executable or DSO. */
7074
7075 static bool
7076 resolution_local_p (enum ld_plugin_symbol_resolution resolution)
7077 {
7078 return (resolution == LDPR_PREVAILING_DEF
7079 || resolution == LDPR_PREVAILING_DEF_IRONLY
7080 || resolution == LDPR_PREVAILING_DEF_IRONLY_EXP
7081 || resolution == LDPR_PREEMPTED_REG
7082 || resolution == LDPR_PREEMPTED_IR
7083 || resolution == LDPR_RESOLVED_IR
7084 || resolution == LDPR_RESOLVED_EXEC);
7085 }
7086
7087 /* COMMON_LOCAL_P being true means that the linker can guarantee that an
7088 uninitialized common symbol in the executable will still be defined
7089 (through COPY relocation) in the executable. */
7090
7091 bool
7092 default_binds_local_p_3 (const_tree exp, bool shlib, bool weak_dominate,
7093 bool extern_protected_data, bool common_local_p)
7094 {
7095 /* A non-decl is an entry in the constant pool. */
7096 if (!DECL_P (exp))
7097 return true;
7098
7099 /* Weakrefs may not bind locally, even though the weakref itself is always
7100 static and therefore local. Similarly, the resolver for ifunc functions
7101 might resolve to a non-local function.
7102 FIXME: We can resolve the weakref case more carefully by looking at the
7103 weakref alias. */
7104 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (exp))
7105 || (TREE_CODE (exp) == FUNCTION_DECL
7106 && cgraph_node::get (exp)
7107 && cgraph_node::get (exp)->ifunc_resolver))
7108 return false;
7109
7110 /* Static variables are always local. */
7111 if (! TREE_PUBLIC (exp))
7112 return true;
7113
7114 /* With a resolution file in hand, take a look at the resolutions.
7115 We can't just return true for resolved_locally symbols,
7116 because dynamic linking might overwrite symbols
7117 in shared libraries. */
7118 bool resolved_locally = false;
7119
7120 bool uninited_common = (DECL_COMMON (exp)
7121 && (DECL_INITIAL (exp) == NULL
7122 || (!in_lto_p
7123 && DECL_INITIAL (exp) == error_mark_node)));
7124
7125 /* A non-external variable is defined locally only if it isn't
7126 an uninitialized COMMON variable or common_local_p is true. */
7127 bool defined_locally = (!DECL_EXTERNAL (exp)
7128 && (!uninited_common || common_local_p));
7129 if (symtab_node *node = symtab_node::get (exp))
7130 {
7131 if (node->in_other_partition)
7132 defined_locally = true;
7133 if (node->can_be_discarded_p ())
7134 ;
7135 else if (resolution_to_local_definition_p (node->resolution))
7136 defined_locally = resolved_locally = true;
7137 else if (resolution_local_p (node->resolution))
7138 resolved_locally = true;
7139 }
7140 if (defined_locally && weak_dominate && !shlib)
7141 resolved_locally = true;
7142
7143 /* Undefined weak symbols are never defined locally. */
7144 if (DECL_WEAK (exp) && !defined_locally)
7145 return false;
7146
7147 /* A symbol is local if the user has said explicitly that it will be,
7148 or if we have a definition for the symbol. We cannot infer visibility
7149 for undefined symbols. */
7150 if (DECL_VISIBILITY (exp) != VISIBILITY_DEFAULT
7151 && (TREE_CODE (exp) == FUNCTION_DECL
7152 || !extern_protected_data
7153 || DECL_VISIBILITY (exp) != VISIBILITY_PROTECTED)
7154 && (DECL_VISIBILITY_SPECIFIED (exp) || defined_locally))
7155 return true;
7156
7157 /* If PIC, then assume that any global name can be overridden by
7158 symbols resolved from other modules. */
7159 if (shlib)
7160 return false;
7161
7162 /* Variables defined outside this object might not be local. */
7163 if (DECL_EXTERNAL (exp) && !resolved_locally)
7164 return false;
7165
7166 /* Non-dominant weak symbols are not defined locally. */
7167 if (DECL_WEAK (exp) && !resolved_locally)
7168 return false;
7169
7170 /* An uninitialized COMMON variable may be unified with symbols
7171 resolved from other modules. */
7172 if (uninited_common && !resolved_locally)
7173 return false;
7174
7175 /* Otherwise we're left with initialized (or non-common) global data
7176 which is of necessity defined locally. */
7177 return true;
7178 }
7179
7180 /* Assume ELF-ish defaults, since that's pretty much the most liberal
7181 wrt cross-module name binding. */
7182
7183 bool
7184 default_binds_local_p (const_tree exp)
7185 {
7186 return default_binds_local_p_3 (exp, flag_shlib != 0, true, false, false);
7187 }
7188
7189 /* Similar to default_binds_local_p, but a common symbol may be local and
7190 extern protected data is non-local. */
7191
7192 bool
7193 default_binds_local_p_2 (const_tree exp)
7194 {
7195 return default_binds_local_p_3 (exp, flag_shlib != 0, true, true,
7196 !flag_pic);
7197 }
7198
7199 bool
7200 default_binds_local_p_1 (const_tree exp, int shlib)
7201 {
7202 return default_binds_local_p_3 (exp, shlib != 0, false, false, false);
7203 }
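
/* Editor's illustration (a sketch, not a quotation of any particular back
   end): a target normally picks one of the wrappers above when filling in
   its target structure, for example in its <target>.c file:

     #undef  TARGET_BINDS_LOCAL_P
     #define TARGET_BINDS_LOCAL_P default_binds_local_p_2

   The choice of wrapper here is an assumption made for illustration; the
   right variant depends on how the target treats common symbols and
   protected visibility.  */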
7204
7205 /* Return true when references to DECL must bind to current definition in
7206 final executable.
7207
7208 The condition is usually equivalent to whether the function binds to the
7209 current module (shared library or executable), that is, to binds_local_p.
7210 We use this fact to avoid the need for another target hook and implement
7211 the logic using binds_local_p plus just the special cases where
7212 decl_binds_to_current_def_p is stronger than binds_local_p, in particular
7213 weak definitions (which can be overwritten at link time by another
7214 definition from a different object file). When resolution info is available
7215 we simply use the knowledge passed to us by the linker plugin. */
7216 bool
7217 decl_binds_to_current_def_p (const_tree decl)
7218 {
7219 gcc_assert (DECL_P (decl));
7220 if (!targetm.binds_local_p (decl))
7221 return false;
7222 if (!TREE_PUBLIC (decl))
7223 return true;
7224
7225 /* When resolution is available, just use it. */
7226 if (symtab_node *node = symtab_node::get (decl))
7227 {
7228 if (node->resolution != LDPR_UNKNOWN
7229 && !node->can_be_discarded_p ())
7230 return resolution_to_local_definition_p (node->resolution);
7231 }
7232
7233 /* Otherwise we have to assume the worst for DECL_WEAK (hidden weaks
7234 bind locally but can still be overwritten), DECL_COMMON (can be merged
7235 with a non-common definition somewhere in the same module) or
7236 DECL_EXTERNAL.
7237 This relies on the fact that binds_local_p behaves as decl_replaceable_p
7238 for all other declaration types. */
7239 if (DECL_WEAK (decl))
7240 return false;
7241 if (DECL_COMMON (decl)
7242 && (DECL_INITIAL (decl) == NULL
7243 || (!in_lto_p && DECL_INITIAL (decl) == error_mark_node)))
7244 return false;
7245 if (DECL_EXTERNAL (decl))
7246 return false;
7247 return true;
7248 }
7249
7250 /* A replaceable function or variable is one which may be replaced
7251 at link-time with an entirely different definition, provided that the
7252 replacement has the same type. For example, functions declared
7253 with __attribute__((weak)) on most systems are replaceable.
7254
7255 COMDAT functions are not replaceable, since all definitions of the
7256 function must be equivalent. It is important that COMDAT functions
7257 not be treated as replaceable so that use of C++ template
7258 instantiations is not penalized. */
7259
7260 bool
7261 decl_replaceable_p (tree decl)
7262 {
7263 gcc_assert (DECL_P (decl));
7264 if (!TREE_PUBLIC (decl) || DECL_COMDAT (decl))
7265 return false;
7266 if (!flag_semantic_interposition
7267 && !DECL_WEAK (decl))
7268 return false;
7269 return !decl_binds_to_current_def_p (decl);
7270 }
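
/* Editor's illustration of what decl_replaceable_p means at the source
   level (a hedged sketch, not part of GCC): when building a shared
   library with the default -fsemantic-interposition,

     __attribute__((weak)) int hook (void) { return 0; }   <- replaceable
     int impl (void) { return 1; }                          <- replaceable

   both definitions may be interposed at link or load time.  With
   -fno-semantic-interposition only the weak definition remains
   replaceable, which is what the flag_semantic_interposition test above
   implements.  */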
7271
7272 /* Default function to output code that will globalize a label. A
7273 target must define GLOBAL_ASM_OP or provide its own function to
7274 globalize a label. */
7275 #ifdef GLOBAL_ASM_OP
7276 void
7277 default_globalize_label (FILE * stream, const char *name)
7278 {
7279 fputs (GLOBAL_ASM_OP, stream);
7280 assemble_name (stream, name);
7281 putc ('\n', stream);
7282 }
7283 #endif /* GLOBAL_ASM_OP */
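
/* Editor's illustration (assumed typical ELF values, see config/elfos.h):
   with GLOBAL_ASM_OP defined as something like "\t.globl ", the call

     default_globalize_label (asm_out_file, "foo");

   writes the single directive

     .globl foo

   to the assembler output.  */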
7284
7285 /* Default function to output code that will globalize a declaration. */
7286 void
7287 default_globalize_decl_name (FILE * stream, tree decl)
7288 {
7289 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
7290 targetm.asm_out.globalize_label (stream, name);
7291 }
7292
7293 /* Default function to output a label for unwind information. The
7294 default is to do nothing. A target that needs nonlocal labels for
7295 unwind information must provide its own function to do this. */
7296 void
7297 default_emit_unwind_label (FILE * stream ATTRIBUTE_UNUSED,
7298 tree decl ATTRIBUTE_UNUSED,
7299 int for_eh ATTRIBUTE_UNUSED,
7300 int empty ATTRIBUTE_UNUSED)
7301 {
7302 }
7303
7304 /* Default function to output a label to divide up the exception table.
7305 The default is to do nothing. A target that needs or wants to divide
7306 up the table must provide its own function to do this. */
7307 void
7308 default_emit_except_table_label (FILE * stream ATTRIBUTE_UNUSED)
7309 {
7310 }
7311
7312 /* This is how to output an internal numbered label where PREFIX is
7313 the class of label and LABELNO is the number within the class. */
7314
7315 void
7316 default_generate_internal_label (char *buf, const char *prefix,
7317 unsigned long labelno)
7318 {
7319 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, labelno);
7320 }
7321
7322 /* This is how to output an internal numbered label where PREFIX is
7323 the class of label and LABELNO is the number within the class. */
7324
7325 void
7326 default_internal_label (FILE *stream, const char *prefix,
7327 unsigned long labelno)
7328 {
7329 char *const buf = (char *) alloca (40 + strlen (prefix));
7330 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, labelno);
7331 ASM_OUTPUT_INTERNAL_LABEL (stream, buf);
7332 }
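
/* Editor's illustration (assuming the common ELF definitions of the two
   macros): default_internal_label (stream, "LC", 5) typically generates
   the buffer "*.LC5" and then prints the label definition

     .LC5:

   where the leading '*' in the buffer marks the name as already
   assembler-ready, so no user label prefix is prepended.  */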
7333
7334
7335 /* The default implementation of ASM_DECLARE_CONSTANT_NAME. */
7336
7337 void
7338 default_asm_declare_constant_name (FILE *file, const char *name,
7339 const_tree exp ATTRIBUTE_UNUSED,
7340 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
7341 {
7342 assemble_label (file, name);
7343 }
7344
7345 /* This is the default behavior at the beginning of a file. It's
7346 controlled by two other target-hook toggles. */
7347 void
7348 default_file_start (void)
7349 {
7350 if (targetm.asm_file_start_app_off
7351 && !(flag_verbose_asm || flag_debug_asm || flag_dump_rtl_in_asm))
7352 fputs (ASM_APP_OFF, asm_out_file);
7353
7354 if (targetm.asm_file_start_file_directive)
7355 {
7356 /* LTO-produced units have no meaningful main_input_filename. */
7357 if (in_lto_p)
7358 output_file_directive (asm_out_file, "<artificial>");
7359 else
7360 output_file_directive (asm_out_file, main_input_filename);
7361 }
7362 }
7363
7364 /* This is a generic routine suitable for use as TARGET_ASM_FILE_END
7365 which emits a special section directive used to indicate whether or
7366 not this object file needs an executable stack. This is primarily
7367 a GNU extension to ELF but could be used on other targets. */
7368
7369 int trampolines_created;
7370
7371 void
7372 file_end_indicate_exec_stack (void)
7373 {
7374 unsigned int flags = SECTION_DEBUG;
7375 if (trampolines_created)
7376 flags |= SECTION_CODE;
7377
7378 switch_to_section (get_section (".note.GNU-stack", flags, NULL));
7379 }
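
/* Editor's illustration of the resulting directive (hedged; the exact
   syntax comes from the target's named-section hook): on a typical
   GNU/Linux target this emits

     .section .note.GNU-stack,"",@progbits

   or, when trampolines were created and SECTION_CODE is set,

     .section .note.GNU-stack,"x",@progbits

   which tells the linker whether an executable stack is required.  */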
7380
7381 /* Emit a special section directive to indicate that this object file
7382 was compiled with -fsplit-stack. This is used to let the linker
7383 detect calls between split-stack code and non-split-stack code, so
7384 that it can modify the split-stack code to allocate a sufficiently
7385 large stack. We emit another special section if there are any
7386 functions in this file which have the no_split_stack attribute, to
7387 prevent the linker from warning about being unable to convert the
7388 functions if they call non-split-stack code. */
7389
7390 void
7391 file_end_indicate_split_stack (void)
7392 {
7393 if (flag_split_stack)
7394 {
7395 switch_to_section (get_section (".note.GNU-split-stack", SECTION_DEBUG,
7396 NULL));
7397 if (saw_no_split_stack)
7398 switch_to_section (get_section (".note.GNU-no-split-stack",
7399 SECTION_DEBUG, NULL));
7400 }
7401 }
7402
7403 /* Output DIRECTIVE (a C string) followed by a newline. This is used as
7404 a get_unnamed_section callback. */
7405
7406 void
7407 output_section_asm_op (const void *directive)
7408 {
7409 fprintf (asm_out_file, "%s\n", (const char *) directive);
7410 }
7411
7412 /* Emit assembly code to switch to section NEW_SECTION. Do nothing if
7413 the current section is NEW_SECTION. */
7414
7415 void
7416 switch_to_section (section *new_section)
7417 {
7418 if (in_section == new_section)
7419 return;
7420
7421 if (new_section->common.flags & SECTION_FORGET)
7422 in_section = NULL;
7423 else
7424 in_section = new_section;
7425
7426 switch (SECTION_STYLE (new_section))
7427 {
7428 case SECTION_NAMED:
7429 targetm.asm_out.named_section (new_section->named.name,
7430 new_section->named.common.flags,
7431 new_section->named.decl);
7432 break;
7433
7434 case SECTION_UNNAMED:
7435 new_section->unnamed.callback (new_section->unnamed.data);
7436 break;
7437
7438 case SECTION_NOSWITCH:
7439 gcc_unreachable ();
7440 break;
7441 }
7442
7443 new_section->common.flags |= SECTION_DECLARED;
7444 }
7445
7446 /* If block symbol SYMBOL has not yet been assigned an offset, place
7447 it at the end of its block. */
7448
7449 void
7450 place_block_symbol (rtx symbol)
7451 {
7452 unsigned HOST_WIDE_INT size, mask, offset;
7453 struct constant_descriptor_rtx *desc;
7454 unsigned int alignment;
7455 struct object_block *block;
7456 tree decl;
7457
7458 gcc_assert (SYMBOL_REF_BLOCK (symbol));
7459 if (SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0)
7460 return;
7461
7462 /* Work out the symbol's size and alignment. */
7463 if (CONSTANT_POOL_ADDRESS_P (symbol))
7464 {
7465 desc = SYMBOL_REF_CONSTANT (symbol);
7466 alignment = desc->align;
7467 size = GET_MODE_SIZE (desc->mode);
7468 }
7469 else if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
7470 {
7471 decl = SYMBOL_REF_DECL (symbol);
7472 gcc_checking_assert (DECL_IN_CONSTANT_POOL (decl));
7473 alignment = DECL_ALIGN (decl);
7474 size = get_constant_size (DECL_INITIAL (decl));
7475 if ((flag_sanitize & SANITIZE_ADDRESS)
7476 && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST
7477 && asan_protect_global (DECL_INITIAL (decl)))
7478 {
7479 size += asan_red_zone_size (size);
7480 alignment = MAX (alignment,
7481 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
7482 }
7483 }
7484 else
7485 {
7486 struct symtab_node *snode;
7487 decl = SYMBOL_REF_DECL (symbol);
7488
7489 snode = symtab_node::get (decl);
7490 if (snode->alias)
7491 {
7492 rtx target = DECL_RTL (snode->ultimate_alias_target ()->decl);
7493
7494 gcc_assert (MEM_P (target)
7495 && GET_CODE (XEXP (target, 0)) == SYMBOL_REF
7496 && SYMBOL_REF_HAS_BLOCK_INFO_P (XEXP (target, 0)));
7497 target = XEXP (target, 0);
7498 place_block_symbol (target);
7499 SYMBOL_REF_BLOCK_OFFSET (symbol) = SYMBOL_REF_BLOCK_OFFSET (target);
7500 return;
7501 }
7502 alignment = get_variable_align (decl);
7503 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
7504 if ((flag_sanitize & SANITIZE_ADDRESS)
7505 && asan_protect_global (decl))
7506 {
7507 size += asan_red_zone_size (size);
7508 alignment = MAX (alignment,
7509 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
7510 }
7511 }
7512
7513 /* Calculate the object's offset from the start of the block. */
7514 block = SYMBOL_REF_BLOCK (symbol);
7515 mask = alignment / BITS_PER_UNIT - 1;
7516 offset = (block->size + mask) & ~mask;
7517 SYMBOL_REF_BLOCK_OFFSET (symbol) = offset;
7518
7519 /* Record the block's new alignment and size. */
7520 block->alignment = MAX (block->alignment, alignment);
7521 block->size = offset + size;
7522
7523 vec_safe_push (block->objects, symbol);
7524 }
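
/* Editor's worked example of the align-up arithmetic above (illustration
   only): with BITS_PER_UNIT == 8, an alignment of 64 bits and a block
   that currently holds 13 bytes,

     mask   = 64 / 8 - 1      =  7
     offset = (13 + 7) & ~7   = 16

   so the symbol is placed at byte offset 16 and block->size becomes
   16 + size.  */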
7525
7526 /* Return the anchor that should be used to address byte offset OFFSET
7527 from the first object in BLOCK. MODEL is the TLS model used
7528 to access it. */
7529
7530 rtx
7531 get_section_anchor (struct object_block *block, HOST_WIDE_INT offset,
7532 enum tls_model model)
7533 {
7534 char label[100];
7535 unsigned int begin, middle, end;
7536 unsigned HOST_WIDE_INT min_offset, max_offset, range, bias, delta;
7537 rtx anchor;
7538
7539 /* Work out the anchor's offset. Use an offset of 0 for the first
7540 anchor so that we don't pessimize the case where we take the address
7541 of a variable at the beginning of the block. This is particularly
7542 useful when a block has only one variable assigned to it.
7543
7544 We try to place anchors RANGE bytes apart, so there can then be
7545 anchors at +/-RANGE, +/-2 * RANGE, and so on, up to the limits of
7546 a ptr_mode offset. With some target settings, the lowest such
7547 anchor might be out of range for the lowest ptr_mode offset;
7548 likewise the highest anchor for the highest offset. Use anchors
7549 at the extreme ends of the ptr_mode range in such cases.
7550
7551 All arithmetic uses unsigned integers in order to avoid
7552 signed overflow. */
7553 max_offset = (unsigned HOST_WIDE_INT) targetm.max_anchor_offset;
7554 min_offset = (unsigned HOST_WIDE_INT) targetm.min_anchor_offset;
7555 range = max_offset - min_offset + 1;
7556 if (range == 0)
7557 offset = 0;
7558 else
7559 {
7560 bias = HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (ptr_mode) - 1);
7561 if (offset < 0)
7562 {
7563 delta = -(unsigned HOST_WIDE_INT) offset + max_offset;
7564 delta -= delta % range;
7565 if (delta > bias)
7566 delta = bias;
7567 offset = (HOST_WIDE_INT) (-delta);
7568 }
7569 else
7570 {
7571 delta = (unsigned HOST_WIDE_INT) offset - min_offset;
7572 delta -= delta % range;
7573 if (delta > bias - 1)
7574 delta = bias - 1;
7575 offset = (HOST_WIDE_INT) delta;
7576 }
7577 }
7578
7579 /* Do a binary search to see if there's already an anchor we can use.
7580 Set BEGIN to the new anchor's index if not. */
7581 begin = 0;
7582 end = vec_safe_length (block->anchors);
7583 while (begin != end)
7584 {
7585 middle = (end + begin) / 2;
7586 anchor = (*block->anchors)[middle];
7587 if (SYMBOL_REF_BLOCK_OFFSET (anchor) > offset)
7588 end = middle;
7589 else if (SYMBOL_REF_BLOCK_OFFSET (anchor) < offset)
7590 begin = middle + 1;
7591 else if (SYMBOL_REF_TLS_MODEL (anchor) > model)
7592 end = middle;
7593 else if (SYMBOL_REF_TLS_MODEL (anchor) < model)
7594 begin = middle + 1;
7595 else
7596 return anchor;
7597 }
7598
7599 /* Create a new anchor with a unique label. */
7600 ASM_GENERATE_INTERNAL_LABEL (label, "LANCHOR", anchor_labelno++);
7601 anchor = create_block_symbol (ggc_strdup (label), block, offset);
7602 SYMBOL_REF_FLAGS (anchor) |= SYMBOL_FLAG_LOCAL | SYMBOL_FLAG_ANCHOR;
7603 SYMBOL_REF_FLAGS (anchor) |= model << SYMBOL_FLAG_TLS_SHIFT;
7604
7605 /* Insert it at index BEGIN. */
7606 vec_safe_insert (block->anchors, begin, anchor);
7607 return anchor;
7608 }
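
/* Editor's worked example of the offset rounding above (illustration
   only, using hypothetical target limits): with min_anchor_offset == -256
   and max_anchor_offset == 255 we get range == 512.  For a byte offset of
   1300 from the start of the block,

     delta  = 1300 - (-256)   = 1556
     delta -= 1556 % 512     ->  1536

   so the anchor is placed at block offset 1536 and the object is then
   addressed as anchor - 236, which is within the [-256, 255] window.  */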
7609
7610 /* Output the objects in BLOCK. */
7611
7612 static void
7613 output_object_block (struct object_block *block)
7614 {
7615 struct constant_descriptor_rtx *desc;
7616 unsigned int i;
7617 HOST_WIDE_INT offset;
7618 tree decl;
7619 rtx symbol;
7620
7621 if (!block->objects)
7622 return;
7623
7624 /* Switch to the section and make sure that the first byte is
7625 suitably aligned. */
7626 /* Special case VTV comdat sections similar to assemble_variable. */
7627 if (SECTION_STYLE (block->sect) == SECTION_NAMED
7628 && block->sect->named.name
7629 && (strcmp (block->sect->named.name, ".vtable_map_vars") == 0))
7630 handle_vtv_comdat_section (block->sect, block->sect->named.decl);
7631 else
7632 switch_to_section (block->sect);
7633
7634 gcc_checking_assert (!(block->sect->common.flags & SECTION_MERGE));
7635 assemble_align (block->alignment);
7636
7637 /* Define the values of all anchors relative to the current section
7638 position. */
7639 FOR_EACH_VEC_SAFE_ELT (block->anchors, i, symbol)
7640 targetm.asm_out.output_anchor (symbol);
7641
7642 /* Output the objects themselves. */
7643 offset = 0;
7644 FOR_EACH_VEC_ELT (*block->objects, i, symbol)
7645 {
7646 /* Move to the object's offset, padding with zeros if necessary. */
7647 assemble_zeros (SYMBOL_REF_BLOCK_OFFSET (symbol) - offset);
7648 offset = SYMBOL_REF_BLOCK_OFFSET (symbol);
7649 if (CONSTANT_POOL_ADDRESS_P (symbol))
7650 {
7651 desc = SYMBOL_REF_CONSTANT (symbol);
7652 /* Pass 1 for align as we have already laid out everything in the
7653 block, so aligning shouldn't be necessary. */
7654 output_constant_pool_1 (desc, 1);
7655 offset += GET_MODE_SIZE (desc->mode);
7656 }
7657 else if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
7658 {
7659 HOST_WIDE_INT size;
7660 decl = SYMBOL_REF_DECL (symbol);
7661 assemble_constant_contents (DECL_INITIAL (decl), XSTR (symbol, 0),
7662 DECL_ALIGN (decl), false);
7663
7664 size = get_constant_size (DECL_INITIAL (decl));
7665 offset += size;
7666 if ((flag_sanitize & SANITIZE_ADDRESS)
7667 && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST
7668 && asan_protect_global (DECL_INITIAL (decl)))
7669 {
7670 size = asan_red_zone_size (size);
7671 assemble_zeros (size);
7672 offset += size;
7673 }
7674 }
7675 else
7676 {
7677 HOST_WIDE_INT size;
7678 decl = SYMBOL_REF_DECL (symbol);
7679 assemble_variable_contents (decl, XSTR (symbol, 0), false, false);
7680 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
7681 offset += size;
7682 if ((flag_sanitize & SANITIZE_ADDRESS)
7683 && asan_protect_global (decl))
7684 {
7685 size = asan_red_zone_size (size);
7686 assemble_zeros (size);
7687 offset += size;
7688 }
7689 }
7690 }
7691 }
7692
7693 /* A callback for qsort to compare object_blocks. */
7694
7695 static int
7696 output_object_block_compare (const void *x, const void *y)
7697 {
7698 object_block *p1 = *(object_block * const*)x;
7699 object_block *p2 = *(object_block * const*)y;
7700
7701 if (p1->sect->common.flags & SECTION_NAMED
7702 && !(p2->sect->common.flags & SECTION_NAMED))
7703 return 1;
7704
7705 if (!(p1->sect->common.flags & SECTION_NAMED)
7706 && p2->sect->common.flags & SECTION_NAMED)
7707 return -1;
7708
7709 if (p1->sect->common.flags & SECTION_NAMED
7710 && p2->sect->common.flags & SECTION_NAMED)
7711 return strcmp (p1->sect->named.name, p2->sect->named.name);
7712
7713 unsigned f1 = p1->sect->common.flags;
7714 unsigned f2 = p2->sect->common.flags;
7715 if (f1 == f2)
7716 return 0;
7717 return f1 < f2 ? -1 : 1;
7718 }
7719
7720 /* Output the definitions of all object_blocks. */
7721
7722 void
7723 output_object_blocks (void)
7724 {
7725 vec<object_block *, va_heap> v;
7726 v.create (object_block_htab->elements ());
7727 object_block *obj;
7728 hash_table<object_block_hasher>::iterator hi;
7729
7730 FOR_EACH_HASH_TABLE_ELEMENT (*object_block_htab, obj, object_block *, hi)
7731 v.quick_push (obj);
7732
7733 /* Sort them in order to output them in a deterministic manner,
7734 otherwise we may get .rodata sections in different orders with
7735 and without -g. */
7736 v.qsort (output_object_block_compare);
7737 unsigned i;
7738 FOR_EACH_VEC_ELT (v, i, obj)
7739 output_object_block (obj);
7740
7741 v.release ();
7742 }
7743
7744 /* This function provides a possible implementation of the
7745 TARGET_ASM_RECORD_GCC_SWITCHES target hook for ELF targets. When triggered
7746 by -frecord-gcc-switches it creates a new mergeable string section in the
7747 assembler output file called TARGET_ASM_RECORD_GCC_SWITCHES_SECTION which
7748 contains the switches in ASCII format.
7749
7750 FIXME: This code does not correctly handle double quote characters
7751 that appear inside strings (it strips them rather than preserving them).
7752 FIXME: ASM_OUTPUT_ASCII, as defined in config/elfos.h, will not emit NUL
7753 characters; instead it treats them as sub-string separators. Since
7754 we want to emit NUL string terminators into the object file we have to use
7755 ASM_OUTPUT_SKIP. */
7756
7757 int
7758 elf_record_gcc_switches (print_switch_type type, const char * name)
7759 {
7760 switch (type)
7761 {
7762 case SWITCH_TYPE_PASSED:
7763 ASM_OUTPUT_ASCII (asm_out_file, name, strlen (name));
7764 ASM_OUTPUT_SKIP (asm_out_file, HOST_WIDE_INT_1U);
7765 break;
7766
7767 case SWITCH_TYPE_DESCRIPTIVE:
7768 if (name == NULL)
7769 {
7770 /* Distinguish between invocations where name is NULL. */
7771 static bool started = false;
7772
7773 if (!started)
7774 {
7775 section * sec;
7776
7777 sec = get_section (targetm.asm_out.record_gcc_switches_section,
7778 SECTION_DEBUG
7779 | SECTION_MERGE
7780 | SECTION_STRINGS
7781 | (SECTION_ENTSIZE & 1),
7782 NULL);
7783 switch_to_section (sec);
7784 started = true;
7785 }
7786 }
7787
7788 default:
7789 break;
7790 }
7791
7792 /* The return value is currently ignored by the caller, but must be 0.
7793 For -fverbose-asm the return value would be the number of characters
7794 emitted into the assembler file. */
7795 return 0;
7796 }
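
/* Editor's illustration of the output (hedged; exact directives depend on
   the target's ASM_OUTPUT_ASCII and ASM_OUTPUT_SKIP definitions): for a
   passed switch such as "-O2" the SWITCH_TYPE_PASSED case emits roughly

     .ascii "-O2"
     .skip 1

   i.e. the option text followed by one NUL byte acting as a string
   terminator inside the mergeable section.  */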
7797
7798 /* Emit text to declare externally defined symbols. It is needed to
7799 properly support non-default visibility. */
7800 void
7801 default_elf_asm_output_external (FILE *file ATTRIBUTE_UNUSED,
7802 tree decl,
7803 const char *name ATTRIBUTE_UNUSED)
7804 {
7805 /* We output the name if and only if TREE_SYMBOL_REFERENCED is
7806 set in order to avoid putting out names that are never really
7807 used. Always output visibility specified in the source. */
7808 if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
7809 && (DECL_VISIBILITY_SPECIFIED (decl)
7810 || targetm.binds_local_p (decl)))
7811 maybe_assemble_visibility (decl);
7812 }
7813
7814 /* The default hook for TARGET_ASM_OUTPUT_SOURCE_FILENAME. */
7815
7816 void
7817 default_asm_output_source_filename (FILE *file, const char *name)
7818 {
7819 #ifdef ASM_OUTPUT_SOURCE_FILENAME
7820 ASM_OUTPUT_SOURCE_FILENAME (file, name);
7821 #else
7822 fprintf (file, "\t.file\t");
7823 output_quoted_string (file, name);
7824 putc ('\n', file);
7825 #endif
7826 }
7827
7828 /* Output a file name in the form wanted by System V. */
7829
7830 void
7831 output_file_directive (FILE *asm_file, const char *input_name)
7832 {
7833 int len;
7834 const char *na;
7835
7836 if (input_name == NULL)
7837 input_name = "<stdin>";
7838 else
7839 input_name = remap_debug_filename (input_name);
7840
7841 len = strlen (input_name);
7842 na = input_name + len;
7843
7844 /* NA gets INPUT_NAME sans directory names. */
7845 while (na > input_name)
7846 {
7847 if (IS_DIR_SEPARATOR (na[-1]))
7848 break;
7849 na--;
7850 }
7851
7852 targetm.asm_out.output_source_filename (asm_file, na);
7853 }
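
/* Editor's illustration (following the code above): for input_name
   "/home/user/src/foo.c" the loop leaves NA pointing at "foo.c", so a
   target without ASM_OUTPUT_SOURCE_FILENAME emits

     .file "foo.c"

   via default_asm_output_source_filename.  */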
7854
7855 /* Create a DEBUG_EXPR_DECL / DEBUG_EXPR pair from RTL expression
7856 EXP. */
7857 rtx
7858 make_debug_expr_from_rtl (const_rtx exp)
7859 {
7860 tree ddecl = make_node (DEBUG_EXPR_DECL), type;
7861 machine_mode mode = GET_MODE (exp);
7862 rtx dval;
7863
7864 DECL_ARTIFICIAL (ddecl) = 1;
7865 if (REG_P (exp) && REG_EXPR (exp))
7866 type = TREE_TYPE (REG_EXPR (exp));
7867 else if (MEM_P (exp) && MEM_EXPR (exp))
7868 type = TREE_TYPE (MEM_EXPR (exp));
7869 else
7870 type = NULL_TREE;
7871 if (type && TYPE_MODE (type) == mode)
7872 TREE_TYPE (ddecl) = type;
7873 else
7874 TREE_TYPE (ddecl) = lang_hooks.types.type_for_mode (mode, 1);
7875 SET_DECL_MODE (ddecl, mode);
7876 dval = gen_rtx_DEBUG_EXPR (mode);
7877 DEBUG_EXPR_TREE_DECL (dval) = ddecl;
7878 SET_DECL_RTL (ddecl, dval);
7879 return dval;
7880 }
7881
7882 #ifdef ELF_ASCII_ESCAPES
7883 /* Default ASM_OUTPUT_LIMITED_STRING for ELF targets. */
7884
7885 void
7886 default_elf_asm_output_limited_string (FILE *f, const char *s)
7887 {
7888 int escape;
7889 unsigned char c;
7890
7891 fputs (STRING_ASM_OP, f);
7892 putc ('"', f);
7893 while (*s != '\0')
7894 {
7895 c = *s;
7896 escape = ELF_ASCII_ESCAPES[c];
7897 switch (escape)
7898 {
7899 case 0:
7900 putc (c, f);
7901 break;
7902 case 1:
7903 putc ('\\', f);
7904 putc ('0'+((c>>6)&7), f);
7905 putc ('0'+((c>>3)&7), f);
7906 putc ('0'+(c&7), f);
7907 break;
7908 default:
7909 putc ('\\', f);
7910 putc (escape, f);
7911 break;
7912 }
7913 s++;
7914 }
7915 putc ('\"', f);
7916 putc ('\n', f);
7917 }
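
/* Editor's illustration (assuming the ELF_ASCII_ESCAPES table from
   config/elfos.h and STRING_ASM_OP == "\t.string\t"): for the input
   bytes 'H', 'i', 0x01 the function prints

     .string "Hi\001"

   Printable characters pass through unchanged, characters with a
   mnemonic escape use it, and everything else falls back to a
   three-digit octal escape as in the case 1 branch above.  */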
7918
7919 /* Default ASM_OUTPUT_ASCII for ELF targets. */
7920
7921 void
7922 default_elf_asm_output_ascii (FILE *f, const char *s, unsigned int len)
7923 {
7924 const char *limit = s + len;
7925 const char *last_null = NULL;
7926 unsigned bytes_in_chunk = 0;
7927 unsigned char c;
7928 int escape;
7929
7930 for (; s < limit; s++)
7931 {
7932 const char *p;
7933
7934 if (bytes_in_chunk >= 60)
7935 {
7936 putc ('\"', f);
7937 putc ('\n', f);
7938 bytes_in_chunk = 0;
7939 }
7940
7941 if (s > last_null)
7942 {
7943 for (p = s; p < limit && *p != '\0'; p++)
7944 continue;
7945 last_null = p;
7946 }
7947 else
7948 p = last_null;
7949
7950 if (p < limit && (p - s) <= (long) ELF_STRING_LIMIT)
7951 {
7952 if (bytes_in_chunk > 0)
7953 {
7954 putc ('\"', f);
7955 putc ('\n', f);
7956 bytes_in_chunk = 0;
7957 }
7958
7959 default_elf_asm_output_limited_string (f, s);
7960 s = p;
7961 }
7962 else
7963 {
7964 if (bytes_in_chunk == 0)
7965 fputs (ASCII_DATA_ASM_OP "\"", f);
7966
7967 c = *s;
7968 escape = ELF_ASCII_ESCAPES[c];
7969 switch (escape)
7970 {
7971 case 0:
7972 putc (c, f);
7973 bytes_in_chunk++;
7974 break;
7975 case 1:
7976 putc ('\\', f);
7977 putc ('0'+((c>>6)&7), f);
7978 putc ('0'+((c>>3)&7), f);
7979 putc ('0'+(c&7), f);
7980 bytes_in_chunk += 4;
7981 break;
7982 default:
7983 putc ('\\', f);
7984 putc (escape, f);
7985 bytes_in_chunk += 2;
7986 break;
7987 }
7988
7989 }
7990 }
7991
7992 if (bytes_in_chunk > 0)
7993 {
7994 putc ('\"', f);
7995 putc ('\n', f);
7996 }
7997 }
7998 #endif
7999
8000 static GTY(()) section *elf_init_array_section;
8001 static GTY(()) section *elf_fini_array_section;
8002
8003 static section *
8004 get_elf_initfini_array_priority_section (int priority,
8005 bool constructor_p)
8006 {
8007 section *sec;
8008 if (priority != DEFAULT_INIT_PRIORITY)
8009 {
8010 char buf[18];
8011 sprintf (buf, "%s.%.5u",
8012 constructor_p ? ".init_array" : ".fini_array",
8013 priority);
8014 sec = get_section (buf, SECTION_WRITE | SECTION_NOTYPE, NULL_TREE);
8015 }
8016 else
8017 {
8018 if (constructor_p)
8019 {
8020 if (elf_init_array_section == NULL)
8021 elf_init_array_section
8022 = get_section (".init_array",
8023 SECTION_WRITE | SECTION_NOTYPE, NULL_TREE);
8024 sec = elf_init_array_section;
8025 }
8026 else
8027 {
8028 if (elf_fini_array_section == NULL)
8029 elf_fini_array_section
8030 = get_section (".fini_array",
8031 SECTION_WRITE | SECTION_NOTYPE, NULL_TREE);
8032 sec = elf_fini_array_section;
8033 }
8034 }
8035 return sec;
8036 }
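
/* Editor's worked example of the name format above (illustration only):
   a constructor with priority 101 goes into the section

     .init_array.00101

   because of the "%.5u" zero padding, while DEFAULT_INIT_PRIORITY
   constructors share the plain .init_array section cached in
   elf_init_array_section.  */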
8037
8038 /* Use .init_array section for constructors. */
8039
8040 void
8041 default_elf_init_array_asm_out_constructor (rtx symbol, int priority)
8042 {
8043 section *sec = get_elf_initfini_array_priority_section (priority,
8044 true);
8045 assemble_addr_to_section (symbol, sec);
8046 }
8047
8048 /* Use .fini_array section for destructors. */
8049
8050 void
8051 default_elf_fini_array_asm_out_destructor (rtx symbol, int priority)
8052 {
8053 section *sec = get_elf_initfini_array_priority_section (priority,
8054 false);
8055 assemble_addr_to_section (symbol, sec);
8056 }
8057
8058 /* Default TARGET_ASM_OUTPUT_IDENT hook.
8059
8060 This is a bit of a cheat. The real default is a no-op, but this
8061 hook is the default for all targets with a .ident directive. */
8062
8063 void
8064 default_asm_output_ident_directive (const char *ident_str)
8065 {
8066 const char *ident_asm_op = "\t.ident\t";
8067
8068 /* If we are still in the front end, do not write out the string
8069 to asm_out_file. Instead, add a fake top-level asm statement.
8070 This allows the front ends to use this hook without actually
8071 writing to asm_out_file, to handle #ident or Pragma Ident. */
8072 if (symtab->state == PARSING)
8073 {
8074 char *buf = ACONCAT ((ident_asm_op, "\"", ident_str, "\"\n", NULL));
8075 symtab->finalize_toplevel_asm (build_string (strlen (buf), buf));
8076 }
8077 else
8078 fprintf (asm_out_file, "%s\"%s\"\n", ident_asm_op, ident_str);
8079 }
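
/* Editor's illustration (following the code above): after the front end
   has finished parsing, a directive such as #ident "GCC: example" ends up
   in the assembler output as

     .ident "GCC: example"

   whereas during parsing the same text is queued as a top-level asm
   statement instead of being written directly.  */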
8080
8081
8082 /* This function ensures that vtable_map variables are not only
8083 in the comdat section, but that each variable has its own unique
8084 comdat name. Without this the variables end up in the same section
8085 with a single comdat name.
8086
8087 FIXME: resolve_unique_section needs to deal better with
8088 decls with both DECL_SECTION_NAME and DECL_ONE_ONLY. Once
8089 that is fixed, this if-else statement can be replaced with
8090 a single call to "switch_to_section (sect)". */
8091
8092 static void
8093 handle_vtv_comdat_section (section *sect, const_tree decl ATTRIBUTE_UNUSED)
8094 {
8095 #if defined (OBJECT_FORMAT_ELF)
8096 targetm.asm_out.named_section (sect->named.name,
8097 sect->named.common.flags
8098 | SECTION_LINKONCE,
8099 DECL_NAME (decl));
8100 in_section = sect;
8101 #else
8102 /* Neither OBJECT_FORMAT_PE nor OBJECT_FORMAT_COFF is set here,
8103 so the following check is used instead.
8104 In case the target is PE or COFF, a comdat group section
8105 is created, e.g. .vtable_map_vars$foo. The linker places
8106 everything in .vtable_map_vars at the end.
8107
8108 A fix could be made in
8109 gcc/config/i386/winnt.c: i386_pe_unique_section. */
8110 if (TARGET_PECOFF)
8111 {
8112 char *name;
8113
8114 if (TREE_CODE (DECL_NAME (decl)) == IDENTIFIER_NODE)
8115 name = ACONCAT ((sect->named.name, "$",
8116 IDENTIFIER_POINTER (DECL_NAME (decl)), NULL));
8117 else
8118 name = ACONCAT ((sect->named.name, "$",
8119 IDENTIFIER_POINTER (DECL_COMDAT_GROUP (DECL_NAME (decl))),
8120 NULL));
8121
8122 targetm.asm_out.named_section (name,
8123 sect->named.common.flags
8124 | SECTION_LINKONCE,
8125 DECL_NAME (decl));
8126 in_section = sect;
8127 }
8128 else
8129 switch_to_section (sect);
8130 #endif
8131 }
8132
8133 #include "gt-varasm.h"