1 /* Output variables, constants and external declarations, for GNU compiler.
2 Copyright (C) 1987-2021 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* This file handles generation of all the assembler code
22 *except* the instructions of a function.
23 This includes declarations of variables and their initial values.
24
25 We also output the assembler code for constants stored in memory
26 and are responsible for combining constants with the same value. */
27
28 #include "config.h"
29 #include "system.h"
30 #include "coretypes.h"
31 #include "backend.h"
32 #include "target.h"
33 #include "rtl.h"
34 #include "tree.h"
35 #include "predict.h"
36 #include "memmodel.h"
37 #include "tm_p.h"
38 #include "stringpool.h"
39 #include "regs.h"
40 #include "emit-rtl.h"
41 #include "cgraph.h"
42 #include "diagnostic-core.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "varasm.h"
46 #include "version.h"
47 #include "flags.h"
48 #include "stmt.h"
49 #include "expr.h"
50 #include "expmed.h"
51 #include "optabs.h"
52 #include "output.h"
53 #include "langhooks.h"
54 #include "debug.h"
55 #include "common/common-target.h"
56 #include "stringpool.h"
57 #include "attribs.h"
58 #include "asan.h"
59 #include "rtl-iter.h"
60 #include "file-prefix-map.h" /* remap_debug_filename() */
61 #include "alloc-pool.h"
62 #include "toplev.h"
63 #include "opts.h"
64
65 #ifdef XCOFF_DEBUGGING_INFO
66 #include "xcoffout.h" /* Needed for external data declarations. */
67 #endif
68
69 /* The (assembler) name of the first globally-visible object output. */
70 extern GTY(()) const char *first_global_object_name;
71 extern GTY(()) const char *weak_global_object_name;
72
73 const char *first_global_object_name;
74 const char *weak_global_object_name;
75
76 class addr_const;
77 class constant_descriptor_rtx;
78 struct rtx_constant_pool;
79
80 #define n_deferred_constants (crtl->varasm.deferred_constants)
81
82 /* Number for making the label on the next
83 constant that is stored in memory. */
84
85 static GTY(()) int const_labelno;
86
87 /* Carry information from ASM_DECLARE_OBJECT_NAME
88 to ASM_FINISH_DECLARE_OBJECT. */
89
90 int size_directive_output;
91
92 /* The last decl for which assemble_variable was called,
93 if it did ASM_DECLARE_OBJECT_NAME.
94 If the last call to assemble_variable didn't do that,
95 this holds 0. */
96
97 tree last_assemble_variable_decl;
98
99 /* The following global variable indicates if the first basic block
100 in a function belongs to the cold partition or not. */
101
102 bool first_function_block_is_cold;
103
104 /* Whether we saw any functions with no_split_stack. */
105
106 static bool saw_no_split_stack;
107
108 static const char *strip_reg_name (const char *);
109 static int contains_pointers_p (tree);
110 #ifdef ASM_OUTPUT_EXTERNAL
111 static bool incorporeal_function_p (tree);
112 #endif
113 static void decode_addr_const (tree, class addr_const *);
114 static hashval_t const_hash_1 (const tree);
115 static int compare_constant (const tree, const tree);
116 static void output_constant_def_contents (rtx);
117 static void output_addressed_constants (tree, int);
118 static unsigned HOST_WIDE_INT output_constant (tree, unsigned HOST_WIDE_INT,
119 unsigned int, bool, bool);
120 static void globalize_decl (tree);
121 static bool decl_readonly_section_1 (enum section_category);
122 #ifdef BSS_SECTION_ASM_OP
123 #ifdef ASM_OUTPUT_ALIGNED_BSS
124 static void asm_output_aligned_bss (FILE *, tree, const char *,
125 unsigned HOST_WIDE_INT, int)
126 ATTRIBUTE_UNUSED;
127 #endif
128 #endif /* BSS_SECTION_ASM_OP */
129 static void mark_weak (tree);
130 static void output_constant_pool (const char *, tree);
131 static void handle_vtv_comdat_section (section *, const_tree);
132 \f
133 /* Well-known sections, each one associated with some sort of *_ASM_OP. */
134 section *text_section;
135 section *data_section;
136 section *readonly_data_section;
137 section *sdata_section;
138 section *ctors_section;
139 section *dtors_section;
140 section *bss_section;
141 section *sbss_section;
142
143 /* Various forms of common section. All are guaranteed to be nonnull. */
144 section *tls_comm_section;
145 section *comm_section;
146 section *lcomm_section;
147
148 /* A SECTION_NOSWITCH section used for declaring global BSS variables.
149 May be null. */
150 section *bss_noswitch_section;
151
152 /* The section that holds the main exception table, when known. The section
153 is set either by the target's init_sections hook or by the first call to
154 switch_to_exception_section. */
155 section *exception_section;
156
157 /* The section that holds the DWARF2 frame unwind information, when known.
158 The section is set either by the target's init_sections hook or by the
159 first call to switch_to_eh_frame_section. */
160 section *eh_frame_section;
161
162 /* asm_out_file's current section. This is NULL if no section has yet
163 been selected or if we lose track of what the current section is. */
164 section *in_section;
165
166 /* True if code for the current function is currently being directed
167 at the cold section. */
168 bool in_cold_section_p;
169
170 /* The following global holds the "function name" for the code in the
171 cold section of a function, if hot/cold function splitting is enabled
172 and there was actually code that went into the cold section. A
173 pseudo function name is needed for the cold section of code for some
174 debugging tools that perform symbolization. */
175 tree cold_function_name = NULL_TREE;
176
177 /* A linked list of all the unnamed sections. */
178 static GTY(()) section *unnamed_sections;
179
180 /* Return a nonzero value if DECL has a section attribute. */
181 #define IN_NAMED_SECTION(DECL) \
182 (VAR_OR_FUNCTION_DECL_P (DECL) && DECL_SECTION_NAME (DECL) != NULL)
183
184 struct section_hasher : ggc_ptr_hash<section>
185 {
186 typedef const char *compare_type;
187
188 static hashval_t hash (section *);
189 static bool equal (section *, const char *);
190 };
191
192 /* Hash table of named sections. */
193 static GTY(()) hash_table<section_hasher> *section_htab;
194
195 struct object_block_hasher : ggc_ptr_hash<object_block>
196 {
197 typedef const section *compare_type;
198
199 static hashval_t hash (object_block *);
200 static bool equal (object_block *, const section *);
201 };
202
203 /* A table of object_blocks, indexed by section. */
204 static GTY(()) hash_table<object_block_hasher> *object_block_htab;
205
206 /* The next number to use for internal anchor labels. */
207 static GTY(()) int anchor_labelno;
208
209 /* A pool of constants that can be shared between functions. */
210 static GTY(()) struct rtx_constant_pool *shared_constant_pool;
211
212 /* Helper routines for maintaining section_htab. */
213
214 bool
215 section_hasher::equal (section *old, const char *new_name)
216 {
217 return strcmp (old->named.name, new_name) == 0;
218 }
219
220 hashval_t
221 section_hasher::hash (section *old)
222 {
223 return htab_hash_string (old->named.name);
224 }
225
226 /* Return a hash value for section SECT. */
227
228 static hashval_t
229 hash_section (section *sect)
230 {
231 if (sect->common.flags & SECTION_NAMED)
232 return htab_hash_string (sect->named.name);
233 return sect->common.flags & ~SECTION_DECLARED;
234 }
235
236 /* Helper routines for maintaining object_block_htab. */
237
238 inline bool
239 object_block_hasher::equal (object_block *old, const section *new_section)
240 {
241 return old->sect == new_section;
242 }
243
244 hashval_t
245 object_block_hasher::hash (object_block *old)
246 {
247 return hash_section (old->sect);
248 }
249
250 /* Return a new unnamed section with the given fields. */
251
252 section *
253 get_unnamed_section (unsigned int flags, void (*callback) (const void *),
254 const void *data)
255 {
256 section *sect;
257
258 sect = ggc_alloc<section> ();
259 sect->unnamed.common.flags = flags | SECTION_UNNAMED;
260 sect->unnamed.callback = callback;
261 sect->unnamed.data = data;
262 sect->unnamed.next = unnamed_sections;
263
264 unnamed_sections = sect;
265 return sect;
266 }
267
268 /* Return a SECTION_NOSWITCH section with the given fields. */
269
270 static section *
271 get_noswitch_section (unsigned int flags, noswitch_section_callback callback)
272 {
273 section *sect;
274
275 sect = ggc_alloc<section> ();
276 sect->noswitch.common.flags = flags | SECTION_NOSWITCH;
277 sect->noswitch.callback = callback;
278
279 return sect;
280 }
281
282 /* Return the named section structure associated with NAME. Create
283 a new section with the given fields if no such structure exists.
284 When NOT_EXISTING is true, fail if the section already exists. Return
285 the existing section if the SECTION_RETAIN bit doesn't match. Set
286 the SECTION_WRITE | SECTION_RELRO bits on the existing section
287 if one of the section flags is SECTION_WRITE | SECTION_RELRO and the
288 other has none of these flags in named sections and either the section
289 hasn't been declared yet or has been declared as writable. */
290
291 section *
292 get_section (const char *name, unsigned int flags, tree decl,
293 bool not_existing)
294 {
295 section *sect, **slot;
296
297 slot = section_htab->find_slot_with_hash (name, htab_hash_string (name),
298 INSERT);
299 flags |= SECTION_NAMED;
300 if (decl != nullptr
301 && DECL_P (decl)
302 && lookup_attribute ("retain", DECL_ATTRIBUTES (decl)))
303 flags |= SECTION_RETAIN;
304 if (*slot == NULL)
305 {
306 sect = ggc_alloc<section> ();
307 sect->named.common.flags = flags;
308 sect->named.name = ggc_strdup (name);
309 sect->named.decl = decl;
310 *slot = sect;
311 }
312 else
313 {
314 if (not_existing)
315 internal_error ("Section already exists: %qs", name);
316
317 sect = *slot;
318 /* It is fine if one of the sections has SECTION_NOTYPE as long as
319 the other has none of the contrary flags (see the logic at the end
320 of default_section_type_flags, below). */
321 if (((sect->common.flags ^ flags) & SECTION_NOTYPE)
322 && !((sect->common.flags | flags)
323 & (SECTION_CODE | SECTION_BSS | SECTION_TLS | SECTION_ENTSIZE
324 | (HAVE_COMDAT_GROUP ? SECTION_LINKONCE : 0))))
325 {
326 sect->common.flags |= SECTION_NOTYPE;
327 flags |= SECTION_NOTYPE;
328 }
329 if ((sect->common.flags & ~SECTION_DECLARED) != flags
330 && ((sect->common.flags | flags) & SECTION_OVERRIDE) == 0)
331 {
332 /* It is fine if one of the section flags is
333 SECTION_WRITE | SECTION_RELRO and the other has none of these
334 flags (i.e. read-only) in named sections and either the
335 section hasn't been declared yet or has been declared as writable.
336 In that case just make sure the resulting flags are
337 SECTION_WRITE | SECTION_RELRO, i.e. writable only because of
338 relocations. */
339 if (((sect->common.flags ^ flags) & (SECTION_WRITE | SECTION_RELRO))
340 == (SECTION_WRITE | SECTION_RELRO)
341 && (sect->common.flags
342 & ~(SECTION_DECLARED | SECTION_WRITE | SECTION_RELRO))
343 == (flags & ~(SECTION_WRITE | SECTION_RELRO))
344 && ((sect->common.flags & SECTION_DECLARED) == 0
345 || (sect->common.flags & SECTION_WRITE)))
346 {
347 sect->common.flags |= (SECTION_WRITE | SECTION_RELRO);
348 return sect;
349 }
350 /* If the SECTION_RETAIN bit doesn't match, return and switch
351 to a new section later. */
352 if ((sect->common.flags & SECTION_RETAIN)
353 != (flags & SECTION_RETAIN))
354 return sect;
355 /* Sanity check user variables for flag changes. */
356 if (sect->named.decl != NULL
357 && DECL_P (sect->named.decl)
358 && decl != sect->named.decl)
359 {
360 if (decl != NULL && DECL_P (decl))
361 error ("%+qD causes a section type conflict with %qD",
362 decl, sect->named.decl);
363 else
364 error ("section type conflict with %qD", sect->named.decl);
365 inform (DECL_SOURCE_LOCATION (sect->named.decl),
366 "%qD was declared here", sect->named.decl);
367 }
368 else if (decl != NULL && DECL_P (decl))
369 error ("%+qD causes a section type conflict", decl);
370 else
371 error ("section type conflict");
372 /* Make sure we don't error about one section multiple times. */
373 sect->common.flags |= SECTION_OVERRIDE;
374 }
375 }
376 return sect;
377 }
378
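/* An illustrative sketch, not part of varasm.c: how the flag checks above
   surface to the user.  Two hypothetical declarations requesting the same
   named section with incompatible flags,

     int a __attribute__ ((section (".shared"))) = 1;         (writable data)
     const int b __attribute__ ((section (".shared"))) = 2;   (read-only data)

   make the second one fail with "causes a section type conflict with 'a'".
   The SECTION_WRITE | SECTION_RELRO special case above instead quietly
   upgrades a read-only request to a relro section when that is the only
   difference between the two requests.  */
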
379 /* Return true if the current compilation mode benefits from having
380 objects grouped into blocks. */
381
382 static bool
383 use_object_blocks_p (void)
384 {
385 return flag_section_anchors;
386 }
387
388 /* Return the object_block structure for section SECT. Create a new
389 structure if we haven't created one already. Return null if SECT
390 itself is null. Also return null for mergeable sections since
391 section anchors can't be used in mergeable sections anyway,
392 because the linker might move objects around, and using the
393 object blocks infrastructure in that case is both a waste and a
394 maintenance burden. */
395
396 static struct object_block *
397 get_block_for_section (section *sect)
398 {
399 struct object_block *block;
400
401 if (sect == NULL)
402 return NULL;
403
404 if (sect->common.flags & SECTION_MERGE)
405 return NULL;
406
407 object_block **slot
408 = object_block_htab->find_slot_with_hash (sect, hash_section (sect),
409 INSERT);
410 block = *slot;
411 if (block == NULL)
412 {
413 block = ggc_cleared_alloc<object_block> ();
414 block->sect = sect;
415 *slot = block;
416 }
417 return block;
418 }
419
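/* An illustrative sketch, not part of varasm.c: object blocks back the
   -fsection-anchors optimization.  On a target that enables it, several
   small file-scope variables emitted through the same block,

     static int x, y, z;

   can all be addressed relative to one anchor symbol as anchor+offset,
   saving address materializations.  Mergeable sections are excluded above
   because the linker may reorder or coalesce their contents, which would
   invalidate the fixed offsets.  */
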
420 /* Create a symbol with label LABEL and place it at byte offset
421 OFFSET in BLOCK. OFFSET can be negative if the symbol's offset
422 is not yet known. LABEL must be a garbage-collected string. */
423
424 static rtx
425 create_block_symbol (const char *label, struct object_block *block,
426 HOST_WIDE_INT offset)
427 {
428 rtx symbol;
429 unsigned int size;
430
431 /* Create the extended SYMBOL_REF. */
432 size = RTX_HDR_SIZE + sizeof (struct block_symbol);
433 symbol = (rtx) ggc_internal_alloc (size);
434
435 /* Initialize the normal SYMBOL_REF fields. */
436 memset (symbol, 0, size);
437 PUT_CODE (symbol, SYMBOL_REF);
438 PUT_MODE (symbol, Pmode);
439 XSTR (symbol, 0) = label;
440 SYMBOL_REF_FLAGS (symbol) = SYMBOL_FLAG_HAS_BLOCK_INFO;
441
442 /* Initialize the block_symbol stuff. */
443 SYMBOL_REF_BLOCK (symbol) = block;
444 SYMBOL_REF_BLOCK_OFFSET (symbol) = offset;
445
446 return symbol;
447 }
448
449 /* Return a section with a particular name and with whatever SECTION_*
450 flags section_type_flags deems appropriate. The name of the section
451 is taken from NAME if nonnull, otherwise it is taken from DECL's
452 DECL_SECTION_NAME. DECL is the decl associated with the section
453 (see the section comment for details) and RELOC is as for
454 section_type_flags. */
455
456 section *
457 get_named_section (tree decl, const char *name, int reloc)
458 {
459 unsigned int flags;
460
461 if (name == NULL)
462 {
463 gcc_assert (decl && DECL_P (decl) && DECL_SECTION_NAME (decl));
464 name = DECL_SECTION_NAME (decl);
465 }
466
467 flags = targetm.section_type_flags (decl, name, reloc);
468 return get_section (name, flags, decl);
469 }
470
471 /* Worker for resolve_unique_section. */
472
473 static bool
474 set_implicit_section (struct symtab_node *n, void *data ATTRIBUTE_UNUSED)
475 {
476 n->implicit_section = true;
477 return false;
478 }
479
480 /* If required, set DECL_SECTION_NAME to a unique name. */
481
482 void
483 resolve_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED,
484 int flag_function_or_data_sections)
485 {
486 if (DECL_SECTION_NAME (decl) == NULL
487 && targetm_common.have_named_sections
488 && (flag_function_or_data_sections
489 || lookup_attribute ("retain", DECL_ATTRIBUTES (decl))
490 || DECL_COMDAT_GROUP (decl)))
491 {
492 targetm.asm_out.unique_section (decl, reloc);
493 if (DECL_SECTION_NAME (decl))
494 symtab_node::get (decl)->call_for_symbol_and_aliases
495 (set_implicit_section, NULL, true);
496 }
497 }
498
499 #ifdef BSS_SECTION_ASM_OP
500
501 #ifdef ASM_OUTPUT_ALIGNED_BSS
502
503 /* Utility function for targets to use in implementing
504 ASM_OUTPUT_ALIGNED_BSS.
505 ??? It is believed that this function will work in most cases so such
506 support is localized here. */
507
508 static void
509 asm_output_aligned_bss (FILE *file, tree decl ATTRIBUTE_UNUSED,
510 const char *name, unsigned HOST_WIDE_INT size,
511 int align)
512 {
513 switch_to_section (bss_section);
514 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
515 #ifdef ASM_DECLARE_OBJECT_NAME
516 last_assemble_variable_decl = decl;
517 ASM_DECLARE_OBJECT_NAME (file, name, decl);
518 #else
519 /* Standard thing is just output label for the object. */
520 ASM_OUTPUT_LABEL (file, name);
521 #endif /* ASM_DECLARE_OBJECT_NAME */
522 ASM_OUTPUT_SKIP (file, size ? size : 1);
523 }
524
525 #endif
526
527 #endif /* BSS_SECTION_ASM_OP */
528
529 #ifndef USE_SELECT_SECTION_FOR_FUNCTIONS
530 /* Return the hot section for function DECL. Return text_section for
531 null DECLs. */
532
533 static section *
534 hot_function_section (tree decl)
535 {
536 if (decl != NULL_TREE
537 && DECL_SECTION_NAME (decl) != NULL
538 && targetm_common.have_named_sections)
539 return get_named_section (decl, NULL, 0);
540 else
541 return text_section;
542 }
543 #endif
544
545 /* Return the section named TEXT_SECTION_NAME if DECL is NULL or if
546 DECL_SECTION_NAME (DECL) is NULL.
547
548 When DECL_SECTION_NAME is non-NULL and NAMED_SECTION_SUFFIX is non-NULL,
549 produce the section whose name is DECL's section name concatenated with
550 NAMED_SECTION_SUFFIX. Otherwise, for an implicit section, produce
551 "TEXT_SECTION_NAME.DECL_NAME". */
552
553 section *
554 get_named_text_section (tree decl,
555 const char *text_section_name,
556 const char *named_section_suffix)
557 {
558 if (decl && DECL_SECTION_NAME (decl))
559 {
560 if (named_section_suffix)
561 {
562 const char *dsn = DECL_SECTION_NAME (decl);
563 const char *stripped_name;
564 char *name, *buffer;
565
566 name = (char *) alloca (strlen (dsn) + 1);
567 memcpy (name, dsn,
568 strlen (dsn) + 1);
569
570 stripped_name = targetm.strip_name_encoding (name);
571
572 buffer = ACONCAT ((stripped_name, named_section_suffix, NULL));
573 return get_named_section (decl, buffer, 0);
574 }
575 else if (symtab_node::get (decl)->implicit_section)
576 {
577 const char *name;
578
579 /* Do not try to split gnu_linkonce functions. This gets somewhat
580 slippery. */
581 if (DECL_COMDAT_GROUP (decl) && !HAVE_COMDAT_GROUP)
582 return NULL;
583 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
584 name = targetm.strip_name_encoding (name);
585 return get_named_section (decl, ACONCAT ((text_section_name, ".",
586 name, NULL)), 0);
587 }
588 else
589 return NULL;
590 }
591 return get_named_section (decl, text_section_name, 0);
592 }
593
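/* An illustrative sketch, not part of varasm.c: the names this produces.
   For a function foo whose implicit section (from -ffunction-sections) is
   ".text.foo", a TEXT_SECTION_NAME of ".text.unlikely" yields
   ".text.unlikely.foo"; with a hypothetical NAMED_SECTION_SUFFIX of
   "_unlikely", an explicit user section ".mytext" would instead become
   ".mytext_unlikely".  */
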
594 /* Choose named function section based on its frequency. */
595
596 section *
597 default_function_section (tree decl, enum node_frequency freq,
598 bool startup, bool exit)
599 {
600 #if defined HAVE_LD_EH_GC_SECTIONS && defined HAVE_LD_EH_GC_SECTIONS_BUG
601 /* Old GNU linkers have buggy --gc-sections support, which sometimes
602 results in .gcc_except_table* sections being garbage collected. */
603 if (decl
604 && symtab_node::get (decl)->implicit_section)
605 return NULL;
606 #endif
607
608 if (!flag_reorder_functions
609 || !targetm_common.have_named_sections)
610 return NULL;
611 /* Startup code should go to the startup subsection unless it is
612 unlikely executed (this happens especially with function splitting
613 where we can split away unnecessary parts of static constructors). */
614 if (startup && freq != NODE_FREQUENCY_UNLIKELY_EXECUTED)
615 {
616 /* During LTO the tp_first_run profiling will naturally place all
617 initialization code first. Using a separate section is counterproductive
618 because startup-only code may call functions which are no longer
619 startup-only. */
620 if (!in_lto_p
621 || !cgraph_node::get (decl)->tp_first_run
622 || !opt_for_fn (decl, flag_profile_reorder_functions))
623 return get_named_text_section (decl, ".text.startup", NULL);
624 else
625 return NULL;
626 }
627
628 /* Similarly for exit. */
629 if (exit && freq != NODE_FREQUENCY_UNLIKELY_EXECUTED)
630 return get_named_text_section (decl, ".text.exit", NULL);
631
632 /* Group cold functions together, similarly for hot code. */
633 switch (freq)
634 {
635 case NODE_FREQUENCY_UNLIKELY_EXECUTED:
636 return get_named_text_section (decl, ".text.unlikely", NULL);
637 case NODE_FREQUENCY_HOT:
638 return get_named_text_section (decl, ".text.hot", NULL);
639 /* FALLTHRU */
640 default:
641 return NULL;
642 }
643 }
644
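/* An illustrative sketch, not part of varasm.c: with -freorder-functions on
   an ELF target, the frequency classes above map to subsections roughly as

     __attribute__ ((cold)) void rarely (void) {}     ->  .text.unlikely
     __attribute__ ((hot))  void often (void) {}      ->  .text.hot

   (or .text.unlikely.rarely / .text.hot.often when -ffunction-sections is
   also enabled), while startup- and exit-only functions that are not
   considered unlikely executed go to .text.startup and .text.exit.  */
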
645 /* Return the section for function DECL.
646
647 If DECL is NULL_TREE, return the text section. We can be passed
648 NULL_TREE under some circumstances by dbxout.c at least.
649
650 If FORCE_COLD is true, return cold function section ignoring
651 the frequency info of cgraph_node. */
652
653 static section *
654 function_section_1 (tree decl, bool force_cold)
655 {
656 section *section = NULL;
657 enum node_frequency freq = NODE_FREQUENCY_NORMAL;
658 bool startup = false, exit = false;
659
660 if (decl)
661 {
662 struct cgraph_node *node = cgraph_node::get (decl);
663
664 if (node)
665 {
666 freq = node->frequency;
667 startup = node->only_called_at_startup;
668 exit = node->only_called_at_exit;
669 }
670 }
671 if (force_cold)
672 freq = NODE_FREQUENCY_UNLIKELY_EXECUTED;
673
674 #ifdef USE_SELECT_SECTION_FOR_FUNCTIONS
675 if (decl != NULL_TREE
676 && DECL_SECTION_NAME (decl) != NULL)
677 {
678 if (targetm.asm_out.function_section)
679 section = targetm.asm_out.function_section (decl, freq,
680 startup, exit);
681 if (section)
682 return section;
683 return get_named_section (decl, NULL, 0);
684 }
685 else
686 return targetm.asm_out.select_section
687 (decl, freq == NODE_FREQUENCY_UNLIKELY_EXECUTED,
688 symtab_node::get (decl)->definition_alignment ());
689 #else
690 if (targetm.asm_out.function_section)
691 section = targetm.asm_out.function_section (decl, freq, startup, exit);
692 if (section)
693 return section;
694 return hot_function_section (decl);
695 #endif
696 }
697
698 /* Return the section for function DECL.
699
700 If DECL is NULL_TREE, return the text section. We can be passed
701 NULL_TREE under some circumstances by dbxout.c at least. */
702
703 section *
704 function_section (tree decl)
705 {
706 /* Handle cases where function splitting code decides
707 to put the function entry point into the unlikely-executed section
708 despite the fact that the function itself is not cold
709 (i.e. it is called rarely but contains a hot loop that is
710 better placed in the hot subsection for code locality). */
711 return function_section_1 (decl,
712 first_function_block_is_cold);
713 }
714
715 /* Return the section for the current function, taking IN_COLD_SECTION_P
716 into account. */
717
718 section *
719 current_function_section (void)
720 {
721 return function_section_1 (current_function_decl, in_cold_section_p);
722 }
723
724 /* Tell assembler to switch to unlikely-to-be-executed text section. */
725
726 section *
727 unlikely_text_section (void)
728 {
729 return function_section_1 (current_function_decl, true);
730 }
731
732 /* When called within a function context, return true if the function
733 has been assigned a cold text section and if SECT is that section.
734 When called outside a function context, return true if SECT is the
735 default cold section. */
736
737 bool
738 unlikely_text_section_p (section *sect)
739 {
740 return sect == function_section_1 (current_function_decl, true);
741 }
742
743 /* Switch to the other function partition (from the hot section into
744 the cold section, or vice versa). */
745
746 void
747 switch_to_other_text_partition (void)
748 {
749 in_cold_section_p = !in_cold_section_p;
750 switch_to_section (current_function_section ());
751 }
752
753 /* Return the read-only or relocated read-only data section
754 associated with function DECL. */
755
756 section *
757 default_function_rodata_section (tree decl, bool relocatable)
758 {
759 const char* sname;
760 unsigned int flags;
761
762 flags = 0;
763
764 if (relocatable)
765 {
766 sname = ".data.rel.ro.local";
767 flags = (SECTION_WRITE | SECTION_RELRO);
768 }
769 else
770 sname = ".rodata";
771
772 if (decl && DECL_SECTION_NAME (decl))
773 {
774 const char *name = DECL_SECTION_NAME (decl);
775
776 if (DECL_COMDAT_GROUP (decl) && HAVE_COMDAT_GROUP)
777 {
778 const char *dot;
779 size_t len;
780 char* rname;
781
782 dot = strchr (name + 1, '.');
783 if (!dot)
784 dot = name;
785 len = strlen (dot) + strlen (sname) + 1;
786 rname = (char *) alloca (len);
787
788 strcpy (rname, sname);
789 strcat (rname, dot);
790 return get_section (rname, (SECTION_LINKONCE | flags), decl);
791 }
792 /* For .gnu.linkonce.t.foo we want to use .gnu.linkonce.r.foo or
793 .gnu.linkonce.d.rel.ro.local.foo if the jump table is relocatable. */
794 else if (DECL_COMDAT_GROUP (decl)
795 && startswith (name, ".gnu.linkonce.t."))
796 {
797 size_t len;
798 char *rname;
799
800 if (relocatable)
801 {
802 len = strlen (name) + strlen (".rel.ro.local") + 1;
803 rname = (char *) alloca (len);
804
805 strcpy (rname, ".gnu.linkonce.d.rel.ro.local");
806 strcat (rname, name + 15);
807 }
808 else
809 {
810 len = strlen (name) + 1;
811 rname = (char *) alloca (len);
812
813 memcpy (rname, name, len);
814 rname[14] = 'r';
815 }
816 return get_section (rname, (SECTION_LINKONCE | flags), decl);
817 }
818 /* For .text.foo we want to use .rodata.foo. */
819 else if (flag_function_sections && flag_data_sections
820 && startswith (name, ".text."))
821 {
822 size_t len = strlen (name) + 1;
823 char *rname = (char *) alloca (len + strlen (sname) - 5);
824
825 memcpy (rname, sname, strlen (sname));
826 memcpy (rname + strlen (sname), name + 5, len - 5);
827 return get_section (rname, flags, decl);
828 }
829 }
830
831 if (relocatable)
832 return get_section (sname, flags, decl);
833 else
834 return readonly_data_section;
835 }
836
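/* An illustrative sketch, not part of varasm.c: with -ffunction-sections
   -fdata-sections, read-only data tied to a function in ".text.foo" (such
   as a switch jump table) is placed in ".rodata.foo", or in
   ".data.rel.ro.local.foo" when it needs relocations; for a comdat
   function in ".gnu.linkonce.t.foo" the result is ".gnu.linkonce.r.foo".  */
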
837 /* Return the read-only data section associated with function DECL
838 for targets where that section should be always the single
839 readonly data section. */
840
841 section *
842 default_no_function_rodata_section (tree, bool)
843 {
844 return readonly_data_section;
845 }
846
847 /* A subroutine of mergeable_string_section and mergeable_constant_section. */
848
849 static const char *
850 function_mergeable_rodata_prefix (void)
851 {
852 section *s = targetm.asm_out.function_rodata_section (current_function_decl,
853 false);
854 if (SECTION_STYLE (s) == SECTION_NAMED)
855 return s->named.name;
856 else
857 return targetm.asm_out.mergeable_rodata_prefix;
858 }
859
860 /* Return the section to use for string merging. */
861
862 static section *
863 mergeable_string_section (tree decl ATTRIBUTE_UNUSED,
864 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED,
865 unsigned int flags ATTRIBUTE_UNUSED)
866 {
867 HOST_WIDE_INT len;
868
869 if (HAVE_GAS_SHF_MERGE && flag_merge_constants
870 && TREE_CODE (decl) == STRING_CST
871 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
872 && align <= 256
873 && (len = int_size_in_bytes (TREE_TYPE (decl))) > 0
874 && TREE_STRING_LENGTH (decl) == len)
875 {
876 scalar_int_mode mode;
877 unsigned int modesize;
878 const char *str;
879 HOST_WIDE_INT i;
880 int j, unit;
881 const char *prefix = function_mergeable_rodata_prefix ();
882 char *name = (char *) alloca (strlen (prefix) + 30);
883
884 mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (TREE_TYPE (decl)));
885 modesize = GET_MODE_BITSIZE (mode);
886 if (modesize >= 8 && modesize <= 256
887 && (modesize & (modesize - 1)) == 0)
888 {
889 if (align < modesize)
890 align = modesize;
891
892 if (!HAVE_LD_ALIGNED_SHF_MERGE && align > 8)
893 return readonly_data_section;
894
895 str = TREE_STRING_POINTER (decl);
896 unit = GET_MODE_SIZE (mode);
897
898 /* Check for embedded NUL characters. */
899 for (i = 0; i < len; i += unit)
900 {
901 for (j = 0; j < unit; j++)
902 if (str[i + j] != '\0')
903 break;
904 if (j == unit)
905 break;
906 }
907 if (i == len - unit || (unit == 1 && i == len))
908 {
909 sprintf (name, "%s.str%d.%d", prefix,
910 modesize / 8, (int) (align / 8));
911 flags |= (modesize / 8) | SECTION_MERGE | SECTION_STRINGS;
912 return get_section (name, flags, NULL);
913 }
914 }
915 }
916
917 return readonly_data_section;
918 }
919
920 /* Return the section to use for constant merging. */
921
922 section *
923 mergeable_constant_section (machine_mode mode ATTRIBUTE_UNUSED,
924 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED,
925 unsigned int flags ATTRIBUTE_UNUSED)
926 {
927 if (HAVE_GAS_SHF_MERGE && flag_merge_constants
928 && mode != VOIDmode
929 && mode != BLKmode
930 && known_le (GET_MODE_BITSIZE (mode), align)
931 && align >= 8
932 && align <= 256
933 && (align & (align - 1)) == 0
934 && (HAVE_LD_ALIGNED_SHF_MERGE ? 1 : align == 8))
935 {
936 const char *prefix = function_mergeable_rodata_prefix ();
937 char *name = (char *) alloca (strlen (prefix) + 30);
938
939 sprintf (name, "%s.cst%d", prefix, (int) (align / 8));
940 flags |= (align / 8) | SECTION_MERGE;
941 return get_section (name, flags, NULL);
942 }
943 return readonly_data_section;
944 }
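
/* An illustrative sketch, not part of varasm.c: with the defaults above, a
   narrow string literal such as "abc" is emitted into ".rodata.str1.1"
   (entity size 1, alignment 1) with the SHF_MERGE and SHF_STRINGS section
   flags, while an 8-byte-aligned floating-point constant goes to
   ".rodata.cst8".  The assembler and linker can then merge identical
   entries across translation units.  */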
945 \f
946 /* Given NAME, a putative register name, discard any customary prefixes. */
947
948 static const char *
949 strip_reg_name (const char *name)
950 {
951 #ifdef REGISTER_PREFIX
952 if (!strncmp (name, REGISTER_PREFIX, strlen (REGISTER_PREFIX)))
953 name += strlen (REGISTER_PREFIX);
954 #endif
955 if (name[0] == '%' || name[0] == '#')
956 name++;
957 return name;
958 }
959 \f
960 /* The user has asked for a DECL to have a particular name. Set (or
961 change) it in such a way that we don't prefix an underscore to
962 it. */
963 void
964 set_user_assembler_name (tree decl, const char *name)
965 {
966 char *starred = (char *) alloca (strlen (name) + 2);
967 starred[0] = '*';
968 strcpy (starred + 1, name);
969 symtab->change_decl_assembler_name (decl, get_identifier (starred));
970 SET_DECL_RTL (decl, NULL_RTX);
971 }
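
/* An illustrative sketch, not part of varasm.c: an asm label such as

     extern int counter asm ("hw_counter");

   reaches this function with NAME "hw_counter".  The '*' prepended to the
   assembler name tells ASM_OUTPUT_LABELREF to emit the name verbatim,
   without the target's user-label prefix (e.g. a leading underscore).  */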
972 \f
973 /* Decode an `asm' spec for a declaration as a register name.
974 Return the register number, or -1 if nothing specified,
975 or -2 if the ASMSPEC is not `cc' or `memory' and is not recognized,
976 or -3 if ASMSPEC is `cc' and is not recognized,
977 or -4 if ASMSPEC is `memory' and is not recognized.
978 Accept an exact spelling or a decimal number.
979 Prefixes such as % are optional. */
980
981 int
982 decode_reg_name_and_count (const char *asmspec, int *pnregs)
983 {
984 /* Presume just one register is clobbered. */
985 *pnregs = 1;
986
987 if (asmspec != 0)
988 {
989 int i;
990
991 /* Get rid of confusing prefixes. */
992 asmspec = strip_reg_name (asmspec);
993
994 /* Allow a decimal number as a "register name". */
995 for (i = strlen (asmspec) - 1; i >= 0; i--)
996 if (! ISDIGIT (asmspec[i]))
997 break;
998 if (asmspec[0] != 0 && i < 0)
999 {
1000 i = atoi (asmspec);
1001 if (i < FIRST_PSEUDO_REGISTER && i >= 0 && reg_names[i][0])
1002 return i;
1003 else
1004 return -2;
1005 }
1006
1007 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1008 if (reg_names[i][0]
1009 && ! strcmp (asmspec, strip_reg_name (reg_names[i])))
1010 return i;
1011
1012 #ifdef OVERLAPPING_REGISTER_NAMES
1013 {
1014 static const struct
1015 {
1016 const char *const name;
1017 const int number;
1018 const int nregs;
1019 } table[] = OVERLAPPING_REGISTER_NAMES;
1020
1021 for (i = 0; i < (int) ARRAY_SIZE (table); i++)
1022 if (table[i].name[0]
1023 && ! strcmp (asmspec, table[i].name))
1024 {
1025 *pnregs = table[i].nregs;
1026 return table[i].number;
1027 }
1028 }
1029 #endif /* OVERLAPPING_REGISTER_NAMES */
1030
1031 #ifdef ADDITIONAL_REGISTER_NAMES
1032 {
1033 static const struct { const char *const name; const int number; } table[]
1034 = ADDITIONAL_REGISTER_NAMES;
1035
1036 for (i = 0; i < (int) ARRAY_SIZE (table); i++)
1037 if (table[i].name[0]
1038 && ! strcmp (asmspec, table[i].name)
1039 && reg_names[table[i].number][0])
1040 return table[i].number;
1041 }
1042 #endif /* ADDITIONAL_REGISTER_NAMES */
1043
1044 if (!strcmp (asmspec, "memory"))
1045 return -4;
1046
1047 if (!strcmp (asmspec, "cc"))
1048 return -3;
1049
1050 return -2;
1051 }
1052
1053 return -1;
1054 }
1055
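/* Like decode_reg_name_and_count, but discard the count of hard registers
   covered by the name.  */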
1056 int
1057 decode_reg_name (const char *name)
1058 {
1059 int count;
1060 return decode_reg_name_and_count (name, &count);
1061 }
1062
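/* An illustrative sketch, not part of varasm.c: the ASMSPEC strings come
   from declarations and clobber lists such as (x86-64, names hypothetical)

     register long base asm ("rbx");      exact register name
     register long tmp asm ("%r12");      "%" stripped by strip_reg_name

   which decode to the corresponding hard register numbers, while an
   unknown name yields -2 and the special clobbers "cc" and "memory"
   yield -3 and -4 when the target has no registers of those names.  */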
1063 \f
1064 /* Return true if DECL's initializer is suitable for a BSS section. */
1065
1066 bool
1067 bss_initializer_p (const_tree decl, bool named)
1068 {
1069 /* Do not put non-common constants into the .bss section; they belong in
1070 a readonly section, except when NAMED is true. */
1071 return ((!TREE_READONLY (decl) || DECL_COMMON (decl) || named)
1072 && (DECL_INITIAL (decl) == NULL
1073 /* In LTO we have no errors in program; error_mark_node is used
1074 to mark offlined constructors. */
1075 || (DECL_INITIAL (decl) == error_mark_node
1076 && !in_lto_p)
1077 || (flag_zero_initialized_in_bss
1078 && initializer_zerop (DECL_INITIAL (decl))
1079 /* A decl with the "persistent" attribute applied and
1080 explicitly initialized to 0 should not be treated as a BSS
1081 variable. */
1082 && !DECL_PERSISTENT_P (decl))));
1083 }
1084
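/* An illustrative sketch, not part of varasm.c: with the default
   -fzero-initialized-in-bss, these file-scope definitions behave as

     int a;              no initializer              -> satisfies the test
     int b = 0;          zero initializer            -> satisfies the test
     int c = 1;          nonzero initializer         -> .data, not BSS
     const int d = 0;    read-only and not common    -> .rodata, not BSS

   so only A and B are candidates for a BSS-style section.  */
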
1085 /* Compute the alignment of variable specified by DECL.
1086 DONT_OUTPUT_DATA is from assemble_variable. */
1087
1088 void
1089 align_variable (tree decl, bool dont_output_data)
1090 {
1091 unsigned int align = DECL_ALIGN (decl);
1092
1093 /* In the case of initializing an array whose length isn't specified,
1094 where we have not yet been able to do the layout,
1095 figure out the proper alignment now. */
1096 if (dont_output_data && DECL_SIZE (decl) == 0
1097 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1098 align = MAX (align, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
1099
1100 /* Some object file formats have a maximum alignment which they support.
1101 In particular, a.out format supports a maximum alignment of 4. */
1102 if (align > MAX_OFILE_ALIGNMENT)
1103 {
1104 error ("alignment of %q+D is greater than maximum object "
1105 "file alignment %d", decl,
1106 MAX_OFILE_ALIGNMENT/BITS_PER_UNIT);
1107 align = MAX_OFILE_ALIGNMENT;
1108 }
1109
1110 if (! DECL_USER_ALIGN (decl))
1111 {
1112 #ifdef DATA_ABI_ALIGNMENT
1113 unsigned int data_abi_align
1114 = DATA_ABI_ALIGNMENT (TREE_TYPE (decl), align);
1115 /* For backwards compatibility, don't assume the ABI alignment for
1116 TLS variables. */
1117 if (! DECL_THREAD_LOCAL_P (decl) || data_abi_align <= BITS_PER_WORD)
1118 align = data_abi_align;
1119 #endif
1120
1121 /* On some machines, it is good to increase alignment sometimes.
1122 But as DECL_ALIGN is used both for actually emitting the variable
1123 and for code accessing the variable as guaranteed alignment, we
1124 can only increase the alignment as a performance optimization if the
1125 references to it must bind to the current definition. */
1126 if (decl_binds_to_current_def_p (decl)
1127 && !DECL_VIRTUAL_P (decl))
1128 {
1129 #ifdef DATA_ALIGNMENT
1130 unsigned int data_align = DATA_ALIGNMENT (TREE_TYPE (decl), align);
1131 /* Don't increase alignment too much for TLS variables - TLS space
1132 is too precious. */
1133 if (! DECL_THREAD_LOCAL_P (decl) || data_align <= BITS_PER_WORD)
1134 align = data_align;
1135 #endif
1136 if (DECL_INITIAL (decl) != 0
1137 /* In LTO we have no errors in program; error_mark_node is used
1138 to mark offlined constructors. */
1139 && (in_lto_p || DECL_INITIAL (decl) != error_mark_node))
1140 {
1141 unsigned int const_align
1142 = targetm.constant_alignment (DECL_INITIAL (decl), align);
1143 /* Don't increase alignment too much for TLS variables - TLS
1144 space is too precious. */
1145 if (! DECL_THREAD_LOCAL_P (decl) || const_align <= BITS_PER_WORD)
1146 align = const_align;
1147 }
1148 }
1149 }
1150
1151 /* Reset the alignment in case we have made it tighter, so we can benefit
1152 from it in get_pointer_alignment. */
1153 SET_DECL_ALIGN (decl, align);
1154 }
1155
1156 /* Return DECL_ALIGN (decl), possibly increased for optimization purposes
1157 beyond what align_variable returned. */
1158
1159 static unsigned int
1160 get_variable_align (tree decl)
1161 {
1162 unsigned int align = DECL_ALIGN (decl);
1163
1164 /* For user-aligned vars or static vars, align_variable already did
1165 everything. */
1166 if (DECL_USER_ALIGN (decl) || !TREE_PUBLIC (decl))
1167 return align;
1168
1169 #ifdef DATA_ABI_ALIGNMENT
1170 if (DECL_THREAD_LOCAL_P (decl))
1171 align = DATA_ABI_ALIGNMENT (TREE_TYPE (decl), align);
1172 #endif
1173
1174 /* For decls that bind to the current definition, align_variable already
1175 did everything, except that it did not assume the ABI-required alignment
1176 of TLS variables. For other vars, increase the alignment here
1177 as an optimization. */
1178 if (!decl_binds_to_current_def_p (decl))
1179 {
1180 /* On some machines, it is good to increase alignment sometimes. */
1181 #ifdef DATA_ALIGNMENT
1182 unsigned int data_align = DATA_ALIGNMENT (TREE_TYPE (decl), align);
1183 /* Don't increase alignment too much for TLS variables - TLS space
1184 is too precious. */
1185 if (! DECL_THREAD_LOCAL_P (decl) || data_align <= BITS_PER_WORD)
1186 align = data_align;
1187 #endif
1188 if (DECL_INITIAL (decl) != 0
1189 /* In LTO we have no errors in program; error_mark_node is used
1190 to mark offlined constructors. */
1191 && (in_lto_p || DECL_INITIAL (decl) != error_mark_node))
1192 {
1193 unsigned int const_align
1194 = targetm.constant_alignment (DECL_INITIAL (decl), align);
1195 /* Don't increase alignment too much for TLS variables - TLS space
1196 is too precious. */
1197 if (! DECL_THREAD_LOCAL_P (decl) || const_align <= BITS_PER_WORD)
1198 align = const_align;
1199 }
1200 }
1201
1202 return align;
1203 }
1204
1205 /* Compute reloc for get_variable_section. The return value
1206 is a mask for which bit 1 indicates a global relocation, and bit 0
1207 indicates a local relocation. */
1208
1209 int
1210 compute_reloc_for_var (tree decl)
1211 {
1212 int reloc;
1213
1214 if (DECL_INITIAL (decl) == error_mark_node)
1215 reloc = contains_pointers_p (TREE_TYPE (decl)) ? 3 : 0;
1216 else if (DECL_INITIAL (decl))
1217 reloc = compute_reloc_for_constant (DECL_INITIAL (decl));
1218 else
1219 reloc = 0;
1220
1221 return reloc;
1222 }
1223
1224 /* Return the section into which the given VAR_DECL or CONST_DECL
1225 should be placed. PREFER_NOSWITCH_P is true if a noswitch
1226 section should be used wherever possible. */
1227
1228 section *
1229 get_variable_section (tree decl, bool prefer_noswitch_p)
1230 {
1231 addr_space_t as = ADDR_SPACE_GENERIC;
1232 int reloc;
1233 varpool_node *vnode = varpool_node::get (decl);
1234 if (vnode)
1235 {
1236 vnode = vnode->ultimate_alias_target ();
1237 decl = vnode->decl;
1238 }
1239
1240 if (TREE_TYPE (decl) != error_mark_node)
1241 as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
1242
1243 /* We need the constructor to figure out reloc flag. */
1244 if (vnode)
1245 vnode->get_constructor ();
1246
1247 if (DECL_COMMON (decl)
1248 && !lookup_attribute ("retain", DECL_ATTRIBUTES (decl)))
1249 {
1250 /* If the decl has been given an explicit section name, or it resides
1251 in a non-generic address space, then it isn't common, and shouldn't
1252 be handled as such. */
1253 gcc_assert (DECL_SECTION_NAME (decl) == NULL
1254 && ADDR_SPACE_GENERIC_P (as));
1255 if (DECL_THREAD_LOCAL_P (decl))
1256 return tls_comm_section;
1257 else if (TREE_PUBLIC (decl) && bss_initializer_p (decl))
1258 return comm_section;
1259 }
1260
1261 reloc = compute_reloc_for_var (decl);
1262
1263 resolve_unique_section (decl, reloc, flag_data_sections);
1264 if (IN_NAMED_SECTION (decl))
1265 {
1266 section *sect = get_named_section (decl, NULL, reloc);
1267
1268 if ((sect->common.flags & SECTION_BSS)
1269 && !bss_initializer_p (decl, true))
1270 {
1271 error_at (DECL_SOURCE_LOCATION (decl),
1272 "only zero initializers are allowed in section %qs",
1273 sect->named.name);
1274 DECL_INITIAL (decl) = error_mark_node;
1275 }
1276 return sect;
1277 }
1278
1279 if (ADDR_SPACE_GENERIC_P (as)
1280 && !DECL_THREAD_LOCAL_P (decl)
1281 && !DECL_NOINIT_P (decl)
1282 && !(prefer_noswitch_p && targetm.have_switchable_bss_sections)
1283 && bss_initializer_p (decl))
1284 {
1285 if (!TREE_PUBLIC (decl)
1286 && !((flag_sanitize & SANITIZE_ADDRESS)
1287 && asan_protect_global (decl)))
1288 return lcomm_section;
1289 if (bss_noswitch_section)
1290 return bss_noswitch_section;
1291 }
1292
1293 return targetm.asm_out.select_section (decl, reloc,
1294 get_variable_align (decl));
1295 }
1296
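/* An illustrative sketch, not part of varasm.c: how the checks above route
   a few hypothetical file-scope variables on an ELF target:

     int pub;                     with -fcommon: comm_section
     static int priv;             lcomm_section (local, BSS-style)
     __thread int t;              tls_comm_section when treated as common
     int c = 1;                   falls through to select_section (.data)

   and a nonzero initializer in an explicitly named BSS-like section is
   rejected with "only zero initializers are allowed in section".  */
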
1297 /* Return the object_block into which DECL should be placed. */
1298
1299 static struct object_block *
1300 get_block_for_decl (tree decl)
1301 {
1302 section *sect;
1303
1304 if (VAR_P (decl))
1305 {
1306 /* The object must be defined in this translation unit. */
1307 if (DECL_EXTERNAL (decl))
1308 return NULL;
1309
1310 /* There's no point using object blocks for something that is
1311 isolated by definition. */
1312 if (DECL_COMDAT_GROUP (decl))
1313 return NULL;
1314 }
1315
1316 /* We can only calculate block offsets if the decl has a known
1317 constant size. */
1318 if (DECL_SIZE_UNIT (decl) == NULL)
1319 return NULL;
1320 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (decl)))
1321 return NULL;
1322
1323 /* Find out which section should contain DECL. We cannot put it into
1324 an object block if it requires a standalone definition. */
1325 if (VAR_P (decl))
1326 align_variable (decl, 0);
1327 sect = get_variable_section (decl, true);
1328 if (SECTION_STYLE (sect) == SECTION_NOSWITCH)
1329 return NULL;
1330
1331 if (bool (lookup_attribute ("retain", DECL_ATTRIBUTES (decl)))
1332 != bool (sect->common.flags & SECTION_RETAIN))
1333 return NULL;
1334
1335 return get_block_for_section (sect);
1336 }
1337
1338 /* Make sure block symbol SYMBOL is in block BLOCK. */
1339
1340 static void
1341 change_symbol_block (rtx symbol, struct object_block *block)
1342 {
1343 if (block != SYMBOL_REF_BLOCK (symbol))
1344 {
1345 gcc_assert (SYMBOL_REF_BLOCK_OFFSET (symbol) < 0);
1346 SYMBOL_REF_BLOCK (symbol) = block;
1347 }
1348 }
1349
1350 /* Return true if it is possible to put DECL in an object_block. */
1351
1352 static bool
1353 use_blocks_for_decl_p (tree decl)
1354 {
1355 struct symtab_node *snode;
1356
1357 /* Don't create object blocks if each DECL is placed into a separate
1358 section because that will uselessly create a section anchor for
1359 each DECL. */
1360 if (flag_data_sections)
1361 return false;
1362
1363 /* Only data DECLs can be placed into object blocks. */
1364 if (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
1365 return false;
1366
1367 /* DECL_INITIAL (decl) set to decl is a hack used for some decls that
1368 are never used from code directly, and we never want object block handling
1369 for those. */
1370 if (DECL_INITIAL (decl) == decl)
1371 return false;
1372
1373 /* If this decl is an alias, then we don't want to emit a
1374 definition. */
1375 if (VAR_P (decl)
1376 && (snode = symtab_node::get (decl)) != NULL
1377 && snode->alias)
1378 return false;
1379
1380 return targetm.use_blocks_for_decl_p (decl);
1381 }
1382
1383 /* Follow the IDENTIFIER_TRANSPARENT_ALIAS chain starting at *ALIAS
1384 until we find an identifier that is not itself a transparent alias.
1385 Modify the alias passed to it by reference (and all aliases on the
1386 way to the ultimate target), such that they do not have to be
1387 followed again, and return the ultimate target of the alias
1388 chain. */
1389
1390 static inline tree
1391 ultimate_transparent_alias_target (tree *alias)
1392 {
1393 tree target = *alias;
1394
1395 if (IDENTIFIER_TRANSPARENT_ALIAS (target))
1396 {
1397 gcc_assert (TREE_CHAIN (target));
1398 target = ultimate_transparent_alias_target (&TREE_CHAIN (target));
1399 gcc_assert (! IDENTIFIER_TRANSPARENT_ALIAS (target)
1400 && ! TREE_CHAIN (target));
1401 *alias = target;
1402 }
1403
1404 return target;
1405 }
1406
1407 /* Return true if REGNUM is mentioned in ELIMINABLE_REGS as a from
1408 register number. */
1409
1410 static bool
1411 eliminable_regno_p (int regnum)
1412 {
1413 static const struct
1414 {
1415 const int from;
1416 const int to;
1417 } eliminables[] = ELIMINABLE_REGS;
1418 for (size_t i = 0; i < ARRAY_SIZE (eliminables); i++)
1419 if (regnum == eliminables[i].from)
1420 return true;
1421 return false;
1422 }
1423
1424 /* Create the DECL_RTL for a VAR_DECL or FUNCTION_DECL. DECL should
1425 have static storage duration. In other words, it should not be an
1426 automatic variable, including PARM_DECLs.
1427
1428 There is, however, one exception: this function handles variables
1429 explicitly placed in a particular register by the user.
1430
1431 This is never called for PARM_DECL nodes. */
1432
1433 void
1434 make_decl_rtl (tree decl)
1435 {
1436 const char *name = 0;
1437 int reg_number;
1438 tree id;
1439 rtx x;
1440
1441 /* Check that we are not being given an automatic variable. */
1442 gcc_assert (TREE_CODE (decl) != PARM_DECL
1443 && TREE_CODE (decl) != RESULT_DECL);
1444
1445 /* A weak alias has TREE_PUBLIC set but not the other bits. */
1446 gcc_assert (!VAR_P (decl)
1447 || TREE_STATIC (decl)
1448 || TREE_PUBLIC (decl)
1449 || DECL_EXTERNAL (decl)
1450 || DECL_REGISTER (decl));
1451
1452 /* And that we were not given a type or a label. */
1453 gcc_assert (TREE_CODE (decl) != TYPE_DECL
1454 && TREE_CODE (decl) != LABEL_DECL);
1455
1456 /* For a duplicate declaration, we can be called twice on the
1457 same DECL node. Don't discard the RTL already made. */
1458 if (DECL_RTL_SET_P (decl))
1459 {
1460 /* If the old RTL had the wrong mode, fix the mode. */
1461 x = DECL_RTL (decl);
1462 if (GET_MODE (x) != DECL_MODE (decl))
1463 SET_DECL_RTL (decl, adjust_address_nv (x, DECL_MODE (decl), 0));
1464
1465 if (TREE_CODE (decl) != FUNCTION_DECL && DECL_REGISTER (decl))
1466 return;
1467
1468 /* ??? Another way to do this would be to maintain a hashed
1469 table of such critters. Instead of adding stuff to a DECL
1470 to give certain attributes to it, we could use an external
1471 hash map from DECL to set of attributes. */
1472
1473 /* Let the target reassign the RTL if it wants.
1474 This is necessary, for example, when one machine specific
1475 decl attribute overrides another. */
1476 targetm.encode_section_info (decl, DECL_RTL (decl), false);
1477
1478 /* If the symbol has a SYMBOL_REF_BLOCK field, update it based
1479 on the new decl information. */
1480 if (MEM_P (x)
1481 && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
1482 && SYMBOL_REF_HAS_BLOCK_INFO_P (XEXP (x, 0)))
1483 change_symbol_block (XEXP (x, 0), get_block_for_decl (decl));
1484
1485 return;
1486 }
1487
1488 /* If this variable belongs to the global constant pool, retrieve the
1489 pre-computed RTL or recompute it in LTO mode. */
1490 if (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
1491 {
1492 SET_DECL_RTL (decl, output_constant_def (DECL_INITIAL (decl), 1));
1493 return;
1494 }
1495
1496 id = DECL_ASSEMBLER_NAME (decl);
1497 name = IDENTIFIER_POINTER (id);
1498
1499 if (name[0] != '*' && TREE_CODE (decl) != FUNCTION_DECL
1500 && DECL_REGISTER (decl))
1501 {
1502 error ("register name not specified for %q+D", decl);
1503 }
1504 else if (TREE_CODE (decl) != FUNCTION_DECL && DECL_REGISTER (decl))
1505 {
1506 const char *asmspec = name+1;
1507 machine_mode mode = DECL_MODE (decl);
1508 reg_number = decode_reg_name (asmspec);
1509 /* First detect errors in declaring global registers. */
1510 if (reg_number == -1)
1511 error ("register name not specified for %q+D", decl);
1512 else if (reg_number < 0)
1513 error ("invalid register name for %q+D", decl);
1514 else if (mode == BLKmode)
1515 error ("data type of %q+D isn%'t suitable for a register",
1516 decl);
1517 else if (!in_hard_reg_set_p (accessible_reg_set, mode, reg_number))
1518 error ("the register specified for %q+D cannot be accessed"
1519 " by the current target", decl);
1520 else if (!in_hard_reg_set_p (operand_reg_set, mode, reg_number))
1521 error ("the register specified for %q+D is not general enough"
1522 " to be used as a register variable", decl);
1523 else if (!targetm.hard_regno_mode_ok (reg_number, mode))
1524 error ("register specified for %q+D isn%'t suitable for data type",
1525 decl);
1526 else if (reg_number != HARD_FRAME_POINTER_REGNUM
1527 && (reg_number == FRAME_POINTER_REGNUM
1528 #ifdef RETURN_ADDRESS_POINTER_REGNUM
1529 || reg_number == RETURN_ADDRESS_POINTER_REGNUM
1530 #endif
1531 || reg_number == ARG_POINTER_REGNUM)
1532 && eliminable_regno_p (reg_number))
1533 error ("register specified for %q+D is an internal GCC "
1534 "implementation detail", decl);
1535 /* Now handle properly declared static register variables. */
1536 else
1537 {
1538 int nregs;
1539
1540 if (DECL_INITIAL (decl) != 0 && TREE_STATIC (decl))
1541 {
1542 DECL_INITIAL (decl) = 0;
1543 error ("global register variable has initial value");
1544 }
1545 if (TREE_THIS_VOLATILE (decl))
1546 warning (OPT_Wvolatile_register_var,
1547 "optimization may eliminate reads and/or "
1548 "writes to register variables");
1549
1550 /* If the user specified one of the eliminables registers here,
1551 e.g., FRAME_POINTER_REGNUM, we don't want to get this variable
1552 confused with that register and be eliminated. This usage is
1553 somewhat suspect... */
1554
1555 SET_DECL_RTL (decl, gen_raw_REG (mode, reg_number));
1556 ORIGINAL_REGNO (DECL_RTL (decl)) = reg_number;
1557 REG_USERVAR_P (DECL_RTL (decl)) = 1;
1558
1559 if (TREE_STATIC (decl))
1560 {
1561 /* Make this register global, so not usable for anything
1562 else. */
1563 #ifdef ASM_DECLARE_REGISTER_GLOBAL
1564 name = IDENTIFIER_POINTER (DECL_NAME (decl));
1565 ASM_DECLARE_REGISTER_GLOBAL (asm_out_file, decl, reg_number, name);
1566 #endif
1567 nregs = hard_regno_nregs (reg_number, mode);
1568 while (nregs > 0)
1569 globalize_reg (decl, reg_number + --nregs);
1570 }
1571
1572 /* As a register variable, it has no section. */
1573 return;
1574 }
1575 /* Avoid internal errors from invalid register
1576 specifications. */
1577 SET_DECL_ASSEMBLER_NAME (decl, NULL_TREE);
1578 DECL_HARD_REGISTER (decl) = 0;
1579 /* Also avoid SSA inconsistencies by pretending this is an external
1580 decl now. */
1581 DECL_EXTERNAL (decl) = 1;
1582 return;
1583 }
1584 /* Now handle ordinary static variables and functions (in memory).
1585 Also handle vars invalidly declared register. */
1586 else if (name[0] == '*')
1587 {
1588 #ifdef REGISTER_PREFIX
1589 if (strlen (REGISTER_PREFIX) != 0)
1590 {
1591 reg_number = decode_reg_name (name);
1592 if (reg_number >= 0 || reg_number == -3)
1593 error ("register name given for non-register variable %q+D", decl);
1594 }
1595 #endif
1596 }
1597
1598 /* Specifying a section attribute on a variable forces it into a
1599 non-.bss section, and thus it cannot be common. */
1600 /* FIXME: In general this code should not be necessary because
1601 the visibility pass is doing the same work. But notice_global_symbol
1602 is called early and it needs to make DECL_RTL to get the name.
1603 We take care of recomputing the DECL_RTL after visibility is changed. */
1604 if (VAR_P (decl)
1605 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
1606 && DECL_SECTION_NAME (decl) != NULL
1607 && DECL_INITIAL (decl) == NULL_TREE
1608 && DECL_COMMON (decl))
1609 DECL_COMMON (decl) = 0;
1610
1611 /* Variables can't be both common and weak. */
1612 if (VAR_P (decl) && DECL_WEAK (decl))
1613 DECL_COMMON (decl) = 0;
1614
1615 if (use_object_blocks_p () && use_blocks_for_decl_p (decl))
1616 x = create_block_symbol (name, get_block_for_decl (decl), -1);
1617 else
1618 {
1619 machine_mode address_mode = Pmode;
1620 if (TREE_TYPE (decl) != error_mark_node)
1621 {
1622 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
1623 address_mode = targetm.addr_space.address_mode (as);
1624 }
1625 x = gen_rtx_SYMBOL_REF (address_mode, name);
1626 }
1627 SYMBOL_REF_WEAK (x) = DECL_WEAK (decl);
1628 SET_SYMBOL_REF_DECL (x, decl);
1629
1630 x = gen_rtx_MEM (DECL_MODE (decl), x);
1631 if (TREE_CODE (decl) != FUNCTION_DECL)
1632 set_mem_attributes (x, decl, 1);
1633 SET_DECL_RTL (decl, x);
1634
1635 /* Optionally set flags or add text to the name to record information
1636 such as that it is a function name.
1637 If the name is changed, the macro ASM_OUTPUT_LABELREF
1638 will have to know how to strip this information. */
1639 targetm.encode_section_info (decl, DECL_RTL (decl), true);
1640 }
1641
1642 /* Like make_decl_rtl, but inhibit creation of new alias sets when
1643 calling make_decl_rtl. Also, reset DECL_RTL before returning the
1644 rtl. */
1645
1646 rtx
1647 make_decl_rtl_for_debug (tree decl)
1648 {
1649 unsigned int save_aliasing_flag;
1650 rtx rtl;
1651
1652 if (DECL_RTL_SET_P (decl))
1653 return DECL_RTL (decl);
1654
1655 /* Kludge alert! Somewhere down the call chain, make_decl_rtl will
1656 call new_alias_set. If running with -fcompare-debug, sometimes
1657 we do not want to create alias sets that will throw the alias
1658 numbers off in the comparison dumps. So... clearing
1659 flag_strict_aliasing will keep new_alias_set() from creating a
1660 new set. */
1661 save_aliasing_flag = flag_strict_aliasing;
1662 flag_strict_aliasing = 0;
1663
1664 rtl = DECL_RTL (decl);
1665 /* Reset DECL_RTL back, as various parts of the compiler expect a set
1666 DECL_RTL to mean the decl is actually going to be output. */
1667 SET_DECL_RTL (decl, NULL);
1668
1669 flag_strict_aliasing = save_aliasing_flag;
1670 return rtl;
1671 }
1672 \f
1673 /* Output a string of literal assembler code
1674 for an `asm' keyword used between functions. */
1675
1676 void
1677 assemble_asm (tree string)
1678 {
1679 const char *p;
1680 app_enable ();
1681
1682 if (TREE_CODE (string) == ADDR_EXPR)
1683 string = TREE_OPERAND (string, 0);
1684
1685 p = TREE_STRING_POINTER (string);
1686 fprintf (asm_out_file, "%s%s\n", p[0] == '\t' ? "" : "\t", p);
1687 }
1688
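/* An illustrative sketch, not part of varasm.c: a file-scope asm such as

     asm ("\t.set magic_constant, 0x2a");

   arrives here as a STRING_CST and is copied to the output verbatim, with
   a leading tab added only when the string does not already start with
   one.  */
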
1689 /* Write the address of the entity given by SYMBOL to SEC. */
1690 void
1691 assemble_addr_to_section (rtx symbol, section *sec)
1692 {
1693 switch_to_section (sec);
1694 assemble_align (POINTER_SIZE);
1695 assemble_integer (symbol, POINTER_SIZE_UNITS, POINTER_SIZE, 1);
1696 }
1697
1698 /* Return the numbered .ctors.N (if CONSTRUCTOR_P) or .dtors.N (if
1699 not) section for PRIORITY. */
1700 section *
1701 get_cdtor_priority_section (int priority, bool constructor_p)
1702 {
1703 /* Buffer conservatively large enough for the full range of a 32-bit
1704 int plus the text below. */
1705 char buf[18];
1706
1707 /* ??? This only works reliably with the GNU linker. */
1708 sprintf (buf, "%s.%.5u",
1709 constructor_p ? ".ctors" : ".dtors",
1710 /* Invert the numbering so the linker puts us in the proper
1711 order; constructors are run from right to left, and the
1712 linker sorts in increasing order. */
1713 MAX_INIT_PRIORITY - priority);
1714 return get_section (buf, SECTION_WRITE, NULL);
1715 }
1716
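/* An illustrative sketch, not part of varasm.c: on targets using the
   .ctors/.dtors mechanism, a prioritized constructor such as

     __attribute__ ((constructor (1000))) static void init (void) {}

   has its address emitted into ".ctors.64535" (MAX_INIT_PRIORITY 65535
   minus the priority), so the linker's lexical sort of these numbered
   sections produces the intended execution order.  */
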
1717 void
1718 default_named_section_asm_out_destructor (rtx symbol, int priority)
1719 {
1720 section *sec;
1721
1722 if (priority != DEFAULT_INIT_PRIORITY)
1723 sec = get_cdtor_priority_section (priority,
1724 /*constructor_p=*/false);
1725 else
1726 sec = get_section (".dtors", SECTION_WRITE, NULL);
1727
1728 assemble_addr_to_section (symbol, sec);
1729 }
1730
1731 #ifdef DTORS_SECTION_ASM_OP
1732 void
1733 default_dtor_section_asm_out_destructor (rtx symbol,
1734 int priority ATTRIBUTE_UNUSED)
1735 {
1736 assemble_addr_to_section (symbol, dtors_section);
1737 }
1738 #endif
1739
1740 void
1741 default_named_section_asm_out_constructor (rtx symbol, int priority)
1742 {
1743 section *sec;
1744
1745 if (priority != DEFAULT_INIT_PRIORITY)
1746 sec = get_cdtor_priority_section (priority,
1747 /*constructor_p=*/true);
1748 else
1749 sec = get_section (".ctors", SECTION_WRITE, NULL);
1750
1751 assemble_addr_to_section (symbol, sec);
1752 }
1753
1754 #ifdef CTORS_SECTION_ASM_OP
1755 void
1756 default_ctor_section_asm_out_constructor (rtx symbol,
1757 int priority ATTRIBUTE_UNUSED)
1758 {
1759 assemble_addr_to_section (symbol, ctors_section);
1760 }
1761 #endif
1762 \f
1763 /* CONSTANT_POOL_BEFORE_FUNCTION may be defined as an expression with
1764 a nonzero value if the constant pool should be output before the
1765 start of the function, or a zero value if the pool should be output
1766 after the end of the function. The default is to put it before the
1767 start. */
1768
1769 #ifndef CONSTANT_POOL_BEFORE_FUNCTION
1770 #define CONSTANT_POOL_BEFORE_FUNCTION 1
1771 #endif
1772
1773 /* DECL is an object (either VAR_DECL or FUNCTION_DECL) which is going
1774 to be output to assembler.
1775 Set first_global_object_name and weak_global_object_name as appropriate. */
1776
1777 void
1778 notice_global_symbol (tree decl)
1779 {
1780 const char **t = &first_global_object_name;
1781
1782 if (first_global_object_name
1783 || !TREE_PUBLIC (decl)
1784 || DECL_EXTERNAL (decl)
1785 || !DECL_NAME (decl)
1786 || (VAR_P (decl) && DECL_HARD_REGISTER (decl))
1787 || (TREE_CODE (decl) != FUNCTION_DECL
1788 && (!VAR_P (decl)
1789 || (DECL_COMMON (decl)
1790 && (DECL_INITIAL (decl) == 0
1791 || DECL_INITIAL (decl) == error_mark_node)))))
1792 return;
1793
1794 /* We win when a global object is found, but it is useful to know about a
1795 weak symbol as well so we can produce nicer unique names. */
1796 if (DECL_WEAK (decl) || DECL_ONE_ONLY (decl) || flag_shlib)
1797 t = &weak_global_object_name;
1798
1799 if (!*t)
1800 {
1801 tree id = DECL_ASSEMBLER_NAME (decl);
1802 ultimate_transparent_alias_target (&id);
1803 *t = ggc_strdup (targetm.strip_name_encoding (IDENTIFIER_POINTER (id)));
1804 }
1805 }
1806
1807 /* If not using flag_reorder_blocks_and_partition, decide early whether the
1808 current function goes into the cold section, so that targets can use
1809 current_function_section during RTL expansion. DECL describes the
1810 function. */
1811
1812 void
1813 decide_function_section (tree decl)
1814 {
1815 first_function_block_is_cold = false;
1816
1817 if (DECL_SECTION_NAME (decl))
1818 {
1819 struct cgraph_node *node = cgraph_node::get (current_function_decl);
1820 /* Calls to function_section rely on first_function_block_is_cold
1821 being accurate. */
1822 first_function_block_is_cold = (node
1823 && node->frequency
1824 == NODE_FREQUENCY_UNLIKELY_EXECUTED);
1825 }
1826
1827 in_cold_section_p = first_function_block_is_cold;
1828 }
1829
1830 /* Get the function's name, as described by its RTL. This may be
1831 different from the DECL_NAME name used in the source file. */
1832 const char *
1833 get_fnname_from_decl (tree decl)
1834 {
1835 rtx x = DECL_RTL (decl);
1836 gcc_assert (MEM_P (x));
1837 x = XEXP (x, 0);
1838 gcc_assert (GET_CODE (x) == SYMBOL_REF);
1839 return XSTR (x, 0);
1840 }
1841
1842 /* Output assembler code for the constant pool of a function, together
1843 with the code associated with defining the function's name. DECL
1844 describes the function. NAME is the function's name. For the
1845 constant pool, we use the current constant pool data. */
1846
1847 void
1848 assemble_start_function (tree decl, const char *fnname)
1849 {
1850 int align;
1851 char tmp_label[100];
1852 bool hot_label_written = false;
1853
1854 if (crtl->has_bb_partition)
1855 {
1856 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LHOTB", const_labelno);
1857 crtl->subsections.hot_section_label = ggc_strdup (tmp_label);
1858 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LCOLDB", const_labelno);
1859 crtl->subsections.cold_section_label = ggc_strdup (tmp_label);
1860 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LHOTE", const_labelno);
1861 crtl->subsections.hot_section_end_label = ggc_strdup (tmp_label);
1862 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LCOLDE", const_labelno);
1863 crtl->subsections.cold_section_end_label = ggc_strdup (tmp_label);
1864 const_labelno++;
1865 cold_function_name = NULL_TREE;
1866 }
1867 else
1868 {
1869 crtl->subsections.hot_section_label = NULL;
1870 crtl->subsections.cold_section_label = NULL;
1871 crtl->subsections.hot_section_end_label = NULL;
1872 crtl->subsections.cold_section_end_label = NULL;
1873 }
1874
1875 /* The following code does not need preprocessing in the assembler. */
1876
1877 app_disable ();
1878
1879 if (CONSTANT_POOL_BEFORE_FUNCTION)
1880 output_constant_pool (fnname, decl);
1881
1882 align = symtab_node::get (decl)->definition_alignment ();
1883
1884 /* Make sure the hot and cold text (code) sections are properly
1885 aligned. This is necessary here in the case where the function
1886 has both hot and cold sections, because we don't want to re-set
1887 the alignment when the section switch happens mid-function. */
1888
1889 if (crtl->has_bb_partition)
1890 {
1891 first_function_block_is_cold = false;
1892
1893 switch_to_section (unlikely_text_section ());
1894 assemble_align (align);
1895 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.cold_section_label);
1896
1897 /* When the function starts with a cold section, we need to explicitly
1898 align the hot section and write out the hot section label.
1899 But if the current function is a thunk, we do not have a CFG. */
1900 if (!cfun->is_thunk
1901 && BB_PARTITION (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) == BB_COLD_PARTITION)
1902 {
1903 switch_to_section (text_section);
1904 assemble_align (align);
1905 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_label);
1906 hot_label_written = true;
1907 first_function_block_is_cold = true;
1908 }
1909 in_cold_section_p = first_function_block_is_cold;
1910 }
1911
1912
1913 /* Switch to the correct text section for the start of the function. */
1914
1915 switch_to_section (function_section (decl), decl);
1916 if (crtl->has_bb_partition && !hot_label_written)
1917 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_label);
1918
1919 /* Tell assembler to move to target machine's alignment for functions. */
1920 align = floor_log2 (align / BITS_PER_UNIT);
1921 if (align > 0)
1922 {
1923 ASM_OUTPUT_ALIGN (asm_out_file, align);
1924 }
1925
1926 /* Handle a user-specified function alignment.
1927 Note that we still need to align to DECL_ALIGN, as above,
1928 because ASM_OUTPUT_MAX_SKIP_ALIGN might not do any alignment at all. */
1929 if (! DECL_USER_ALIGN (decl)
1930 && align_functions.levels[0].log > align
1931 && optimize_function_for_speed_p (cfun))
1932 {
1933 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1934 int align_log = align_functions.levels[0].log;
1935 #endif
1936 int max_skip = align_functions.levels[0].maxskip;
1937 if (flag_limit_function_alignment && crtl->max_insn_address > 0
1938 && max_skip >= crtl->max_insn_address)
1939 max_skip = crtl->max_insn_address - 1;
1940
1941 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1942 ASM_OUTPUT_MAX_SKIP_ALIGN (asm_out_file, align_log, max_skip);
1943 if (max_skip == align_functions.levels[0].maxskip)
1944 ASM_OUTPUT_MAX_SKIP_ALIGN (asm_out_file,
1945 align_functions.levels[1].log,
1946 align_functions.levels[1].maxskip);
1947 #else
1948 ASM_OUTPUT_ALIGN (asm_out_file, align_functions.levels[0].log);
1949 #endif
1950 }
1951
1952 #ifdef ASM_OUTPUT_FUNCTION_PREFIX
1953 ASM_OUTPUT_FUNCTION_PREFIX (asm_out_file, fnname);
1954 #endif
1955
1956 if (!DECL_IGNORED_P (decl))
1957 (*debug_hooks->begin_function) (decl);
1958
1959 /* Make function name accessible from other files, if appropriate. */
1960
1961 if (TREE_PUBLIC (decl))
1962 {
1963 notice_global_symbol (decl);
1964
1965 globalize_decl (decl);
1966
1967 maybe_assemble_visibility (decl);
1968 }
1969
1970 if (DECL_PRESERVE_P (decl))
1971 targetm.asm_out.mark_decl_preserved (fnname);
1972
1973 unsigned short patch_area_size = crtl->patch_area_size;
1974 unsigned short patch_area_entry = crtl->patch_area_entry;
1975
1976 /* Emit the patching area before the entry label, if any. */
1977 if (patch_area_entry > 0)
1978 targetm.asm_out.print_patchable_function_entry (asm_out_file,
1979 patch_area_entry, true);
1980
1981 /* Do any machine/system dependent processing of the function name. */
1982 #ifdef ASM_DECLARE_FUNCTION_NAME
1983 ASM_DECLARE_FUNCTION_NAME (asm_out_file, fnname, current_function_decl);
1984 #else
1985 /* Standard thing is just output label for the function. */
1986 ASM_OUTPUT_FUNCTION_LABEL (asm_out_file, fnname, current_function_decl);
1987 #endif /* ASM_DECLARE_FUNCTION_NAME */
1988
1989 /* And the area after the label. Record it if we haven't done so yet. */
1990 if (patch_area_size > patch_area_entry)
1991 targetm.asm_out.print_patchable_function_entry (asm_out_file,
1992 patch_area_size
1993 - patch_area_entry,
1994 patch_area_entry == 0);
1995
1996 if (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (decl)))
1997 saw_no_split_stack = true;
1998 }
1999
2000 /* Output assembler code associated with defining the size of the
2001 function. DECL describes the function. NAME is the function's name. */
2002
2003 void
2004 assemble_end_function (tree decl, const char *fnname ATTRIBUTE_UNUSED)
2005 {
2006 #ifdef ASM_DECLARE_FUNCTION_SIZE
2007 /* We could have switched section in the middle of the function. */
2008 if (crtl->has_bb_partition)
2009 switch_to_section (function_section (decl));
2010 ASM_DECLARE_FUNCTION_SIZE (asm_out_file, fnname, decl);
2011 #endif
2012 if (! CONSTANT_POOL_BEFORE_FUNCTION)
2013 {
2014 output_constant_pool (fnname, decl);
2015 switch_to_section (function_section (decl)); /* need to switch back */
2016 }
2017 /* Output labels for the end of the hot/cold text sections (to be used
2018 by debug info). */
2019 if (crtl->has_bb_partition)
2020 {
2021 section *save_text_section;
2022
2023 save_text_section = in_section;
2024 switch_to_section (unlikely_text_section ());
2025 #ifdef ASM_DECLARE_COLD_FUNCTION_SIZE
2026 if (cold_function_name != NULL_TREE)
2027 ASM_DECLARE_COLD_FUNCTION_SIZE (asm_out_file,
2028 IDENTIFIER_POINTER (cold_function_name),
2029 decl);
2030 #endif
2031 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.cold_section_end_label);
2032 if (first_function_block_is_cold)
2033 switch_to_section (text_section);
2034 else
2035 switch_to_section (function_section (decl));
2036 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_end_label);
2037 switch_to_section (save_text_section);
2038 }
2039 }
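
/* An illustrative sketch only -- label spellings and section names vary
   by target.  With -freorder-blocks-and-partition, assemble_start_function
   and assemble_end_function bracket the two parts of the function roughly
   as

       .section .text.unlikely
     .LCOLDB0:
       .text
     .LHOTB0:
         ... function body, possibly switching sections in the middle ...
       .section .text.unlikely
     .LCOLDE0:
       .text
     .LHOTE0:

   and the two end labels are what the debug info uses to delimit the hot
   and cold ranges.  */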
2040 \f
2041 /* Assemble code to leave SIZE bytes of zeros. */
2042
2043 void
2044 assemble_zeros (unsigned HOST_WIDE_INT size)
2045 {
2046 /* Do no output if -fsyntax-only. */
2047 if (flag_syntax_only)
2048 return;
2049
2050 #ifdef ASM_NO_SKIP_IN_TEXT
2051 /* The `space' pseudo in the text section outputs nop insns rather than 0s,
2052 so we must output 0s explicitly in the text section. */
2053 if (ASM_NO_SKIP_IN_TEXT && (in_section->common.flags & SECTION_CODE) != 0)
2054 {
2055 unsigned HOST_WIDE_INT i;
2056 for (i = 0; i < size; i++)
2057 assemble_integer (const0_rtx, 1, BITS_PER_UNIT, 1);
2058 }
2059 else
2060 #endif
2061 if (size > 0)
2062 ASM_OUTPUT_SKIP (asm_out_file, size);
2063 }
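
/* For illustration: on a typical ELF/GAS target ASM_OUTPUT_SKIP emits a
   ".skip" directive, so assemble_zeros (16) produces "\t.skip\t16" --
   except in code sections of targets defining ASM_NO_SKIP_IN_TEXT, where
   sixteen explicit zero bytes are emitted instead of letting the
   assembler fill the gap with nop instructions.  */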
2064
2065 /* Assemble an alignment pseudo op for an ALIGN-bit boundary. */
2066
2067 void
2068 assemble_align (unsigned int align)
2069 {
2070 if (align > BITS_PER_UNIT)
2071 {
2072 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
2073 }
2074 }
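
/* For example, a hypothetical caller wanting a 32-byte boundary requests
   it in bits, i.e. assemble_align (256); on an 8-bit-byte target that
   passes floor_log2 (256 / BITS_PER_UNIT) == 5 to ASM_OUTPUT_ALIGN, and
   the exact directive emitted (".align", ".p2align", ...) is up to the
   target.  */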
2075
2076 /* Assemble a string constant with the specified C string as contents. */
2077
2078 void
2079 assemble_string (const char *p, int size)
2080 {
2081 int pos = 0;
2082 int maximum = 2000;
2083
2084 /* If the string is very long, split it up. */
2085
2086 while (pos < size)
2087 {
2088 int thissize = size - pos;
2089 if (thissize > maximum)
2090 thissize = maximum;
2091
2092 ASM_OUTPUT_ASCII (asm_out_file, p, thissize);
2093
2094 pos += thissize;
2095 p += thissize;
2096 }
2097 }
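
/* A small worked example: a 5000-byte string is emitted as three
   ASM_OUTPUT_ASCII chunks of 2000, 2000 and 1000 bytes, so that no
   single directive becomes unmanageably long.  */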
2098
2099 \f
2100 /* A noswitch_section_callback for lcomm_section. */
2101
2102 static bool
2103 emit_local (tree decl ATTRIBUTE_UNUSED,
2104 const char *name ATTRIBUTE_UNUSED,
2105 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
2106 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
2107 {
2108 #if defined ASM_OUTPUT_ALIGNED_DECL_LOCAL
2109 unsigned int align = symtab_node::get (decl)->definition_alignment ();
2110 ASM_OUTPUT_ALIGNED_DECL_LOCAL (asm_out_file, decl, name,
2111 size, align);
2112 return true;
2113 #elif defined ASM_OUTPUT_ALIGNED_LOCAL
2114 unsigned int align = symtab_node::get (decl)->definition_alignment ();
2115 ASM_OUTPUT_ALIGNED_LOCAL (asm_out_file, name, size, align);
2116 return true;
2117 #else
2118 ASM_OUTPUT_LOCAL (asm_out_file, name, size, rounded);
2119 return false;
2120 #endif
2121 }
2122
2123 /* A noswitch_section_callback for bss_noswitch_section. */
2124
2125 #if defined ASM_OUTPUT_ALIGNED_BSS
2126 static bool
2127 emit_bss (tree decl ATTRIBUTE_UNUSED,
2128 const char *name ATTRIBUTE_UNUSED,
2129 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
2130 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
2131 {
2132 ASM_OUTPUT_ALIGNED_BSS (asm_out_file, decl, name, size,
2133 get_variable_align (decl));
2134 return true;
2135 }
2136 #endif
2137
2138 /* A noswitch_section_callback for comm_section. */
2139
2140 static bool
2141 emit_common (tree decl ATTRIBUTE_UNUSED,
2142 const char *name ATTRIBUTE_UNUSED,
2143 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
2144 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
2145 {
2146 #if defined ASM_OUTPUT_ALIGNED_DECL_COMMON
2147 ASM_OUTPUT_ALIGNED_DECL_COMMON (asm_out_file, decl, name,
2148 size, get_variable_align (decl));
2149 return true;
2150 #elif defined ASM_OUTPUT_ALIGNED_COMMON
2151 ASM_OUTPUT_ALIGNED_COMMON (asm_out_file, name, size,
2152 get_variable_align (decl));
2153 return true;
2154 #else
2155 ASM_OUTPUT_COMMON (asm_out_file, name, size, rounded);
2156 return false;
2157 #endif
2158 }
2159
2160 /* A noswitch_section_callback for tls_comm_section. */
2161
2162 static bool
2163 emit_tls_common (tree decl ATTRIBUTE_UNUSED,
2164 const char *name ATTRIBUTE_UNUSED,
2165 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
2166 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
2167 {
2168 #ifdef ASM_OUTPUT_TLS_COMMON
2169 ASM_OUTPUT_TLS_COMMON (asm_out_file, decl, name, size);
2170 return true;
2171 #else
2172 sorry ("thread-local COMMON data not implemented");
2173 return true;
2174 #endif
2175 }
2176
2177 /* Assemble DECL given that it belongs in SECTION_NOSWITCH section SECT.
2178 NAME is the name of DECL's SYMBOL_REF. */
2179
2180 static void
2181 assemble_noswitch_variable (tree decl, const char *name, section *sect,
2182 unsigned int align)
2183 {
2184 unsigned HOST_WIDE_INT size, rounded;
2185
2186 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2187 rounded = size;
2188
2189 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_protect_global (decl))
2190 size += asan_red_zone_size (size);
2191
2192 /* Don't allocate zero bytes of common,
2193 since that means "undefined external" in the linker. */
2194 if (size == 0)
2195 rounded = 1;
2196
2197 /* Round size up to multiple of BIGGEST_ALIGNMENT bits
2198 so that each uninitialized object starts on such a boundary. */
2199 rounded += (BIGGEST_ALIGNMENT / BITS_PER_UNIT) - 1;
2200 rounded = (rounded / (BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2201 * (BIGGEST_ALIGNMENT / BITS_PER_UNIT));
2202
2203 if (!sect->noswitch.callback (decl, name, size, rounded)
2204 && (unsigned HOST_WIDE_INT) (align / BITS_PER_UNIT) > rounded)
2205 error ("requested alignment for %q+D is greater than "
2206 "implemented alignment of %wu", decl, rounded);
2207 }
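
/* A worked example of the rounding above, assuming BIGGEST_ALIGNMENT is
   128 bits (16 bytes): a 10-byte common object gets rounded == 16, and a
   zero-sized one is first bumped to 1 and then rounded up to 16, so it
   is never emitted as a zero-byte common (which the linker would treat
   as an undefined external).  */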
2208
2209 /* A subroutine of assemble_variable. Output the label and contents of
2210 DECL, whose address is a SYMBOL_REF with name NAME. DONT_OUTPUT_DATA
2211 is as for assemble_variable. */
2212
2213 static void
2214 assemble_variable_contents (tree decl, const char *name,
2215 bool dont_output_data, bool merge_strings)
2216 {
2217 /* Do any machine/system dependent processing of the object. */
2218 #ifdef ASM_DECLARE_OBJECT_NAME
2219 last_assemble_variable_decl = decl;
2220 ASM_DECLARE_OBJECT_NAME (asm_out_file, name, decl);
2221 #else
2222 /* Standard thing is just output label for the object. */
2223 ASM_OUTPUT_LABEL (asm_out_file, name);
2224 #endif /* ASM_DECLARE_OBJECT_NAME */
2225
2226 if (!dont_output_data)
2227 {
2228 /* Caller is supposed to use varpool_get_constructor when it wants
2229 to output the body. */
2230 gcc_assert (!in_lto_p || DECL_INITIAL (decl) != error_mark_node);
2231 if (DECL_INITIAL (decl)
2232 && DECL_INITIAL (decl) != error_mark_node
2233 && !initializer_zerop (DECL_INITIAL (decl)))
2234 /* Output the actual data. */
2235 output_constant (DECL_INITIAL (decl),
2236 tree_to_uhwi (DECL_SIZE_UNIT (decl)),
2237 get_variable_align (decl),
2238 false, merge_strings);
2239 else
2240 /* Leave space for it. */
2241 assemble_zeros (tree_to_uhwi (DECL_SIZE_UNIT (decl)));
2242 targetm.asm_out.decl_end ();
2243 }
2244 }
2245
2246 /* Write out assembly for the variable DECL, which is not defined in
2247 the current translation unit. */
2248 void
2249 assemble_undefined_decl (tree decl)
2250 {
2251 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
2252 targetm.asm_out.assemble_undefined_decl (asm_out_file, name, decl);
2253 }
2254
2255 /* Assemble everything that is needed for a variable or function declaration.
2256 Not used for automatic variables, and not used for function definitions.
2257 Should not be called for variables of incomplete structure type.
2258
2259 TOP_LEVEL is nonzero if this variable has file scope.
2260 AT_END is nonzero if this is the special handling, at end of compilation,
2261 to define things that have had only tentative definitions.
2262 DONT_OUTPUT_DATA if nonzero means don't actually output the
2263 initial value (that will be done by the caller). */
2264
2265 void
2266 assemble_variable (tree decl, int top_level ATTRIBUTE_UNUSED,
2267 int at_end ATTRIBUTE_UNUSED, int dont_output_data)
2268 {
2269 const char *name;
2270 rtx decl_rtl, symbol;
2271 section *sect;
2272 unsigned int align;
2273 bool asan_protected = false;
2274
2275 /* This function is supposed to handle VARIABLES. Ensure we have one. */
2276 gcc_assert (VAR_P (decl));
2277
2278 /* Emulated TLS had better not get this far. */
2279 gcc_checking_assert (targetm.have_tls || !DECL_THREAD_LOCAL_P (decl));
2280
2281 last_assemble_variable_decl = 0;
2282
2283 /* Normally no need to say anything here for external references,
2284 since assemble_external is called by the language-specific code
2285 when a declaration is first seen. */
2286
2287 if (DECL_EXTERNAL (decl))
2288 return;
2289
2290 /* Do nothing for global register variables. */
2291 if (DECL_RTL_SET_P (decl) && REG_P (DECL_RTL (decl)))
2292 {
2293 TREE_ASM_WRITTEN (decl) = 1;
2294 return;
2295 }
2296
2297 /* If type was incomplete when the variable was declared,
2298 see if it is complete now. */
2299
2300 if (DECL_SIZE (decl) == 0)
2301 layout_decl (decl, 0);
2302
2303 /* Still incomplete => don't allocate it; treat the tentative defn
2304 (which is what it must have been) as an `extern' reference. */
2305
2306 if (!dont_output_data && DECL_SIZE (decl) == 0)
2307 {
2308 error ("storage size of %q+D isn%'t known", decl);
2309 TREE_ASM_WRITTEN (decl) = 1;
2310 return;
2311 }
2312
2313 /* The first declaration of a variable that comes through this function
2314 decides whether it is global (in C, has external linkage)
2315 or local (in C, has internal linkage). So do nothing more
2316 if this function has already run. */
2317
2318 if (TREE_ASM_WRITTEN (decl))
2319 return;
2320
2321 /* Make sure targetm.encode_section_info is invoked before we set
2322 ASM_WRITTEN. */
2323 decl_rtl = DECL_RTL (decl);
2324
2325 TREE_ASM_WRITTEN (decl) = 1;
2326
2327 /* Do no output if -fsyntax-only. */
2328 if (flag_syntax_only)
2329 return;
2330
2331 if (! dont_output_data
2332 && ! valid_constant_size_p (DECL_SIZE_UNIT (decl)))
2333 {
2334 error ("size of variable %q+D is too large", decl);
2335 return;
2336 }
2337
2338 gcc_assert (MEM_P (decl_rtl));
2339 gcc_assert (GET_CODE (XEXP (decl_rtl, 0)) == SYMBOL_REF);
2340 symbol = XEXP (decl_rtl, 0);
2341
2342 /* If this symbol belongs to the tree constant pool, output the constant
2343 if it hasn't already been written. */
2344 if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
2345 {
2346 tree decl = SYMBOL_REF_DECL (symbol);
2347 if (!TREE_ASM_WRITTEN (DECL_INITIAL (decl)))
2348 output_constant_def_contents (symbol);
2349 return;
2350 }
2351
2352 app_disable ();
2353
2354 name = XSTR (symbol, 0);
2355 if (TREE_PUBLIC (decl) && DECL_NAME (decl))
2356 notice_global_symbol (decl);
2357
2358 /* Compute the alignment of this data. */
2359
2360 align_variable (decl, dont_output_data);
2361
2362 if ((flag_sanitize & SANITIZE_ADDRESS)
2363 && asan_protect_global (decl))
2364 {
2365 asan_protected = true;
2366 SET_DECL_ALIGN (decl, MAX (DECL_ALIGN (decl),
2367 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT));
2368 }
2369
2370 set_mem_align (decl_rtl, DECL_ALIGN (decl));
2371
2372 align = get_variable_align (decl);
2373
2374 if (TREE_PUBLIC (decl))
2375 maybe_assemble_visibility (decl);
2376
2377 if (DECL_PRESERVE_P (decl))
2378 targetm.asm_out.mark_decl_preserved (name);
2379
2380 /* First make the assembler name(s) global if appropriate. */
2381 sect = get_variable_section (decl, false);
2382 if (TREE_PUBLIC (decl)
2383 && (sect->common.flags & SECTION_COMMON) == 0)
2384 globalize_decl (decl);
2385
2386 /* Output any data that we will need to use the address of. */
2387 if (DECL_INITIAL (decl) && DECL_INITIAL (decl) != error_mark_node)
2388 output_addressed_constants (DECL_INITIAL (decl), 0);
2389
2390 /* dbxout.c needs to know this. */
2391 if (sect && (sect->common.flags & SECTION_CODE) != 0)
2392 DECL_IN_TEXT_SECTION (decl) = 1;
2393
2394 /* If the decl is part of an object_block, make sure that the decl
2395 has been positioned within its block, but do not write out its
2396 definition yet. output_object_blocks will do that later. */
2397 if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol) && SYMBOL_REF_BLOCK (symbol))
2398 {
2399 gcc_assert (!dont_output_data);
2400 place_block_symbol (symbol);
2401 }
2402 else if (SECTION_STYLE (sect) == SECTION_NOSWITCH)
2403 assemble_noswitch_variable (decl, name, sect, align);
2404 else
2405 {
2406 /* Special-case handling of vtv comdat sections. */
2407 if (sect->named.name
2408 && (strcmp (sect->named.name, ".vtable_map_vars") == 0))
2409 handle_vtv_comdat_section (sect, decl);
2410 else
2411 switch_to_section (sect, decl);
2412 if (align > BITS_PER_UNIT)
2413 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
2414 assemble_variable_contents (decl, name, dont_output_data,
2415 (sect->common.flags & SECTION_MERGE)
2416 && (sect->common.flags & SECTION_STRINGS));
2417 if (asan_protected)
2418 {
2419 unsigned HOST_WIDE_INT size
2420 = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2421 assemble_zeros (asan_red_zone_size (size));
2422 }
2423 }
2424 }
2425
2426
2427 /* Given a function declaration (FN_DECL), this function assembles the
2428 function into the .preinit_array section. */
2429
2430 void
2431 assemble_vtv_preinit_initializer (tree fn_decl)
2432 {
2433 section *sect;
2434 unsigned flags = SECTION_WRITE;
2435 rtx symbol = XEXP (DECL_RTL (fn_decl), 0);
2436
2437 flags |= SECTION_NOTYPE;
2438 sect = get_section (".preinit_array", flags, fn_decl);
2439 switch_to_section (sect);
2440 assemble_addr_to_section (symbol, sect);
2441 }
2442
2443 /* Return 1 if type TYPE contains any pointers. */
2444
2445 static int
2446 contains_pointers_p (tree type)
2447 {
2448 switch (TREE_CODE (type))
2449 {
2450 case POINTER_TYPE:
2451 case REFERENCE_TYPE:
2452 /* I'm not sure whether OFFSET_TYPE needs this treatment,
2453 so I'll play safe and return 1. */
2454 case OFFSET_TYPE:
2455 return 1;
2456
2457 case RECORD_TYPE:
2458 case UNION_TYPE:
2459 case QUAL_UNION_TYPE:
2460 {
2461 tree fields;
2462 /* For a type that has fields, see if the fields have pointers. */
2463 for (fields = TYPE_FIELDS (type); fields; fields = DECL_CHAIN (fields))
2464 if (TREE_CODE (fields) == FIELD_DECL
2465 && contains_pointers_p (TREE_TYPE (fields)))
2466 return 1;
2467 return 0;
2468 }
2469
2470 case ARRAY_TYPE:
2471 /* An array type contains pointers if its element type does. */
2472 return contains_pointers_p (TREE_TYPE (type));
2473
2474 default:
2475 return 0;
2476 }
2477 }
2478
2479 /* We delay assemble_external processing until
2480 the compilation unit is finalized. This is the best we can do for
2481 right now (i.e. stage 3 of GCC 4.0) - the right thing is to delay
2482 it all the way to final. See PR 17982 for further discussion. */
2483 static GTY(()) tree pending_assemble_externals;
2484
2485 #ifdef ASM_OUTPUT_EXTERNAL
2486 /* Some targets delay some output to final using TARGET_ASM_FILE_END.
2487 As a result, assemble_external can be called after the list of externals
2488 is processed and the pointer set destroyed. */
2489 static bool pending_assemble_externals_processed;
2490
2491 /* Avoid O(external_decls**2) lookups in the pending_assemble_externals
2492 TREE_LIST in assemble_external. */
2493 static hash_set<tree> *pending_assemble_externals_set;
2494
2495 /* True if DECL is a function decl for which no out-of-line copy exists.
2496 It is assumed that DECL's assembler name has been set. */
2497
2498 static bool
2499 incorporeal_function_p (tree decl)
2500 {
2501 if (TREE_CODE (decl) == FUNCTION_DECL && fndecl_built_in_p (decl))
2502 {
2503 const char *name;
2504
2505 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2506 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
2507 return true;
2508
2509 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2510 /* Atomic or sync builtins which have survived this far will be
2511 resolved externally and therefore are not incorporeal. */
2512 if (startswith (name, "__builtin_"))
2513 return true;
2514 }
2515 return false;
2516 }
2517
2518 /* Actually do the tests to determine if this is necessary, and invoke
2519 ASM_OUTPUT_EXTERNAL. */
2520 static void
2521 assemble_external_real (tree decl)
2522 {
2523 rtx rtl = DECL_RTL (decl);
2524
2525 if (MEM_P (rtl) && GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF
2526 && !SYMBOL_REF_USED (XEXP (rtl, 0))
2527 && !incorporeal_function_p (decl))
2528 {
2529 /* Some systems do require some output. */
2530 SYMBOL_REF_USED (XEXP (rtl, 0)) = 1;
2531 ASM_OUTPUT_EXTERNAL (asm_out_file, decl, XSTR (XEXP (rtl, 0), 0));
2532 }
2533 }
2534 #endif
2535
2536 void
2537 process_pending_assemble_externals (void)
2538 {
2539 #ifdef ASM_OUTPUT_EXTERNAL
2540 tree list;
2541 for (list = pending_assemble_externals; list; list = TREE_CHAIN (list))
2542 assemble_external_real (TREE_VALUE (list));
2543
2544 pending_assemble_externals = 0;
2545 pending_assemble_externals_processed = true;
2546 delete pending_assemble_externals_set;
2547 #endif
2548 }
2549
2550 /* This TREE_LIST contains any weak symbol declarations waiting
2551 to be emitted. */
2552 static GTY(()) tree weak_decls;
2553
2554 /* Output something to declare an external symbol to the assembler,
2555 and qualifiers such as weakness. (Most assemblers don't need an
2556 extern declaration, so we normally output nothing.) Do nothing if
2557 DECL is not external. */
2558
2559 void
2560 assemble_external (tree decl ATTRIBUTE_UNUSED)
2561 {
2562 /* Make sure that the ASM_OUT_FILE is open.
2563 If it's not, we should not be calling this function. */
2564 gcc_assert (asm_out_file);
2565
2566 /* In a perfect world, the following condition would be true.
2567 Sadly, the Go front end emits assembly *from the front end*,
2568 bypassing the call graph. See PR52739. Fix before GCC 4.8. */
2569 #if 0
2570 /* This function should only be called if we are expanding, or have
2571 expanded, to RTL.
2572 Ideally, only final.c would be calling this function, but it is
2573 not clear whether that would break things somehow. See PR 17982
2574 for further discussion. */
2575 gcc_assert (state == EXPANSION
2576 || state == FINISHED);
2577 #endif
2578
2579 if (!DECL_P (decl) || !DECL_EXTERNAL (decl) || !TREE_PUBLIC (decl))
2580 return;
2581
2582 /* We want to output the annotation for weak and external symbols at the
2583 very end, so we can check whether they are referenced or not. */
2584
2585 if (TARGET_SUPPORTS_WEAK
2586 && DECL_WEAK (decl)
2587 /* TREE_STATIC is a weird and abused creature which is not
2588 generally the right test for whether an entity has been
2589 locally emitted, inlined or otherwise not-really-extern, but
2590 for declarations that can be weak, it happens to
2591 match. */
2592 && !TREE_STATIC (decl)
2593 && lookup_attribute ("weak", DECL_ATTRIBUTES (decl))
2594 && value_member (decl, weak_decls) == NULL_TREE)
2595 weak_decls = tree_cons (NULL, decl, weak_decls);
2596
2597 #ifdef ASM_OUTPUT_EXTERNAL
2598 if (pending_assemble_externals_processed)
2599 {
2600 assemble_external_real (decl);
2601 return;
2602 }
2603
2604 if (! pending_assemble_externals_set->add (decl))
2605 pending_assemble_externals = tree_cons (NULL, decl,
2606 pending_assemble_externals);
2607 #endif
2608 }
2609
2610 /* Similar, for calling a library function FUN. */
2611
2612 void
2613 assemble_external_libcall (rtx fun)
2614 {
2615 /* Declare the library function name external when first used, if necessary. */
2616 if (! SYMBOL_REF_USED (fun))
2617 {
2618 SYMBOL_REF_USED (fun) = 1;
2619 targetm.asm_out.external_libcall (fun);
2620 }
2621 }
2622
2623 /* Assemble a label named NAME. */
2624
2625 void
2626 assemble_label (FILE *file, const char *name)
2627 {
2628 ASM_OUTPUT_LABEL (file, name);
2629 }
2630
2631 /* Set the symbol_referenced flag for ID. */
2632 void
2633 mark_referenced (tree id)
2634 {
2635 TREE_SYMBOL_REFERENCED (id) = 1;
2636 }
2637
2638 /* Set the symbol_referenced flag for DECL and notify callgraph. */
2639 void
2640 mark_decl_referenced (tree decl)
2641 {
2642 if (TREE_CODE (decl) == FUNCTION_DECL)
2643 {
2644 /* Extern inline functions don't become needed when referenced.
2645 If we know a method will be emitted in other TU and no new
2646 functions can be marked reachable, just use the external
2647 definition. */
2648 struct cgraph_node *node = cgraph_node::get_create (decl);
2649 if (!DECL_EXTERNAL (decl)
2650 && !node->definition)
2651 node->mark_force_output ();
2652 }
2653 else if (VAR_P (decl))
2654 {
2655 varpool_node *node = varpool_node::get_create (decl);
2656 /* The C++ front end uses mark_decl_referenced to force COMDAT variables
2657 that might otherwise appear dead to be output. */
2658 node->force_output = true;
2659 }
2660 /* else do nothing - we can get various sorts of CST nodes here,
2661 which do not need to be marked. */
2662 }
2663
2664
2665 /* Output to FILE (an assembly file) a reference to NAME. If NAME
2666 starts with a *, the rest of NAME is output verbatim. Otherwise
2667 NAME is transformed in a target-specific way (usually by the
2668 addition of an underscore). */
2669
2670 void
2671 assemble_name_raw (FILE *file, const char *name)
2672 {
2673 if (name[0] == '*')
2674 fputs (&name[1], file);
2675 else
2676 ASM_OUTPUT_LABELREF (file, name);
2677 }
2678
2679 /* Return NAME that should actually be emitted, looking through
2680 transparent aliases. If NAME refers to an entity that is also
2681 represented as a tree (like a function or variable), mark the entity
2682 as referenced. */
2683 const char *
2684 assemble_name_resolve (const char *name)
2685 {
2686 const char *real_name = targetm.strip_name_encoding (name);
2687 tree id = maybe_get_identifier (real_name);
2688
2689 if (id)
2690 {
2691 tree id_orig = id;
2692
2693 mark_referenced (id);
2694 ultimate_transparent_alias_target (&id);
2695 if (id != id_orig)
2696 name = IDENTIFIER_POINTER (id);
2697 gcc_assert (! TREE_CHAIN (id));
2698 }
2699
2700 return name;
2701 }
2702
2703 /* Like assemble_name_raw, but should be used when NAME might refer to
2704 an entity that is also represented as a tree (like a function or
2705 variable). If NAME does refer to such an entity, that entity will
2706 be marked as referenced. */
2707
2708 void
2709 assemble_name (FILE *file, const char *name)
2710 {
2711 assemble_name_raw (file, assemble_name_resolve (name));
2712 }
2713
2714 /* Allocate SIZE bytes of writable static space with a gensym name
2715 and return an RTX to refer to its address. */
2716
2717 rtx
2718 assemble_static_space (unsigned HOST_WIDE_INT size)
2719 {
2720 char name[17];
2721 const char *namestring;
2722 rtx x;
2723
2724 ASM_GENERATE_INTERNAL_LABEL (name, "LF", const_labelno);
2725 ++const_labelno;
2726 namestring = ggc_strdup (name);
2727
2728 x = gen_rtx_SYMBOL_REF (Pmode, namestring);
2729 SYMBOL_REF_FLAGS (x) = SYMBOL_FLAG_LOCAL;
2730
2731 #ifdef ASM_OUTPUT_ALIGNED_DECL_LOCAL
2732 ASM_OUTPUT_ALIGNED_DECL_LOCAL (asm_out_file, NULL_TREE, name, size,
2733 BIGGEST_ALIGNMENT);
2734 #else
2735 #ifdef ASM_OUTPUT_ALIGNED_LOCAL
2736 ASM_OUTPUT_ALIGNED_LOCAL (asm_out_file, name, size, BIGGEST_ALIGNMENT);
2737 #else
2738 {
2739 /* Round size up to multiple of BIGGEST_ALIGNMENT bits
2740 so that each uninitialized object starts on such a boundary. */
2741 /* Variable `rounded' might or might not be used in ASM_OUTPUT_LOCAL. */
2742 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED
2743 = ((size + (BIGGEST_ALIGNMENT / BITS_PER_UNIT) - 1)
2744 / (BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2745 * (BIGGEST_ALIGNMENT / BITS_PER_UNIT));
2746 ASM_OUTPUT_LOCAL (asm_out_file, name, size, rounded);
2747 }
2748 #endif
2749 #endif
2750 return x;
2751 }
2752
2753 /* Assemble the static constant template for function entry trampolines.
2754 This is done at most once per compilation.
2755 Returns an RTX for the address of the template. */
2756
2757 static GTY(()) rtx initial_trampoline;
2758
2759 rtx
2760 assemble_trampoline_template (void)
2761 {
2762 char label[256];
2763 const char *name;
2764 int align;
2765 rtx symbol;
2766
2767 gcc_assert (targetm.asm_out.trampoline_template != NULL);
2768
2769 if (initial_trampoline)
2770 return initial_trampoline;
2771
2772 /* By default, put trampoline templates in read-only data section. */
2773
2774 #ifdef TRAMPOLINE_SECTION
2775 switch_to_section (TRAMPOLINE_SECTION);
2776 #else
2777 switch_to_section (readonly_data_section);
2778 #endif
2779
2780 /* Write the assembler code to define one. */
2781 align = floor_log2 (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
2782 if (align > 0)
2783 ASM_OUTPUT_ALIGN (asm_out_file, align);
2784
2785 targetm.asm_out.internal_label (asm_out_file, "LTRAMP", 0);
2786 targetm.asm_out.trampoline_template (asm_out_file);
2787
2788 /* Record the rtl to refer to it. */
2789 ASM_GENERATE_INTERNAL_LABEL (label, "LTRAMP", 0);
2790 name = ggc_strdup (label);
2791 symbol = gen_rtx_SYMBOL_REF (Pmode, name);
2792 SYMBOL_REF_FLAGS (symbol) = SYMBOL_FLAG_LOCAL;
2793
2794 initial_trampoline = gen_const_mem (BLKmode, symbol);
2795 set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
2796 set_mem_size (initial_trampoline, TRAMPOLINE_SIZE);
2797
2798 return initial_trampoline;
2799 }
2800 \f
2801 /* A and B are either alignments or offsets. Return the minimum alignment
2802 that may be assumed after adding the two together. */
2803
2804 static inline unsigned
2805 min_align (unsigned int a, unsigned int b)
2806 {
2807 return least_bit_hwi (a | b);
2808 }
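
/* For instance, min_align (16, 24) is 8: the least set bit of
   (16 | 24) == 24 is 8, so adding an offset of 24 to a 16-byte-aligned
   address can only be assumed to be 8-byte aligned.  */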
2809
2810 /* Return the assembler directive for creating a given kind of integer
2811 object. SIZE is the number of bytes in the object and ALIGNED_P
2812 indicates whether it is known to be aligned. Return NULL if the
2813 assembly dialect has no such directive.
2814
2815 The returned string should be printed at the start of a new line and
2816 be followed immediately by the object's initial value. */
2817
2818 const char *
2819 integer_asm_op (int size, int aligned_p)
2820 {
2821 struct asm_int_op *ops;
2822
2823 if (aligned_p)
2824 ops = &targetm.asm_out.aligned_op;
2825 else
2826 ops = &targetm.asm_out.unaligned_op;
2827
2828 switch (size)
2829 {
2830 case 1:
2831 return targetm.asm_out.byte_op;
2832 case 2:
2833 return ops->hi;
2834 case 3:
2835 return ops->psi;
2836 case 4:
2837 return ops->si;
2838 case 5:
2839 case 6:
2840 case 7:
2841 return ops->pdi;
2842 case 8:
2843 return ops->di;
2844 case 9:
2845 case 10:
2846 case 11:
2847 case 12:
2848 case 13:
2849 case 14:
2850 case 15:
2851 return ops->pti;
2852 case 16:
2853 return ops->ti;
2854 default:
2855 return NULL;
2856 }
2857 }
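
/* Illustrative only -- the directives are target properties.  On a
   typical ELF target the aligned 4-byte op is "\t.long\t" and the 8-byte
   op is "\t.quad\t", so integer_asm_op (4, 1) would return "\t.long\t",
   while a request for an unusual size such as 6 falls back on ops->pdi,
   which may well be NULL.  */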
2858
2859 /* Use directive OP to assemble an integer object X. Print OP at the
2860 start of the line, followed immediately by the value of X. */
2861
2862 void
2863 assemble_integer_with_op (const char *op, rtx x)
2864 {
2865 fputs (op, asm_out_file);
2866 output_addr_const (asm_out_file, x);
2867 fputc ('\n', asm_out_file);
2868 }
2869
2870 /* The default implementation of the asm_out.integer target hook. */
2871
2872 bool
2873 default_assemble_integer (rtx x ATTRIBUTE_UNUSED,
2874 unsigned int size ATTRIBUTE_UNUSED,
2875 int aligned_p ATTRIBUTE_UNUSED)
2876 {
2877 const char *op = integer_asm_op (size, aligned_p);
2878 /* Avoid GAS bugs for large values. Specifically negative values whose
2879 absolute value fits in a bfd_vma, but not in a bfd_signed_vma. */
2880 if (size > UNITS_PER_WORD && size > POINTER_SIZE_UNITS)
2881 return false;
2882 return op && (assemble_integer_with_op (op, x), true);
2883 }
2884
2885 /* Assemble the integer constant X into an object of SIZE bytes. ALIGN is
2886 the alignment of the integer in bits. Return 1 if we were able to output
2887 the constant, otherwise 0. We must be able to output the constant,
2888 if FORCE is nonzero. */
2889
2890 bool
2891 assemble_integer (rtx x, unsigned int size, unsigned int align, int force)
2892 {
2893 int aligned_p;
2894
2895 aligned_p = (align >= MIN (size * BITS_PER_UNIT, BIGGEST_ALIGNMENT));
2896
2897 /* See if the target hook can handle this kind of object. */
2898 if (targetm.asm_out.integer (x, size, aligned_p))
2899 return true;
2900
2901 /* If the object is a multi-byte one, try splitting it up. Split
2902 it into words if it is multi-word, otherwise split it into bytes. */
2903 if (size > 1)
2904 {
2905 machine_mode omode, imode;
2906 unsigned int subalign;
2907 unsigned int subsize, i;
2908 enum mode_class mclass;
2909
2910 subsize = size > UNITS_PER_WORD ? UNITS_PER_WORD : 1;
2911 subalign = MIN (align, subsize * BITS_PER_UNIT);
2912 if (GET_CODE (x) == CONST_FIXED)
2913 mclass = GET_MODE_CLASS (GET_MODE (x));
2914 else
2915 mclass = MODE_INT;
2916
2917 omode = mode_for_size (subsize * BITS_PER_UNIT, mclass, 0).require ();
2918 imode = mode_for_size (size * BITS_PER_UNIT, mclass, 0).require ();
2919
2920 for (i = 0; i < size; i += subsize)
2921 {
2922 rtx partial = simplify_subreg (omode, x, imode, i);
2923 if (!partial || !assemble_integer (partial, subsize, subalign, 0))
2924 break;
2925 }
2926 if (i == size)
2927 return true;
2928
2929 /* If we've printed some of it, but not all of it, there's no going
2930 back now. */
2931 gcc_assert (!i);
2932 }
2933
2934 gcc_assert (!force);
2935
2936 return false;
2937 }
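
/* A concrete example of the splitting logic: asked to emit a 16-byte
   integer on a target whose widest supported directive is 8 bytes
   (UNITS_PER_WORD == 8), the loop above extracts two DImode halves with
   simplify_subreg and emits each one separately; only if even that fails
   -- and FORCE is zero -- does the function return false.  */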
2938 \f
2939 /* Assemble the floating-point constant D into an object of size MODE. ALIGN
2940 is the alignment of the constant in bits. If REVERSE is true, D is output
2941 in reverse storage order. */
2942
2943 void
2944 assemble_real (REAL_VALUE_TYPE d, scalar_float_mode mode, unsigned int align,
2945 bool reverse)
2946 {
2947 long data[4] = {0, 0, 0, 0};
2948 int bitsize, nelts, nunits, units_per;
2949 rtx elt;
2950
2951 /* This is hairy. We have a quantity of known size. real_to_target
2952 will put it into an array of *host* longs, 32 bits per element
2953 (even if long is more than 32 bits). We need to determine the
2954 number of array elements that are occupied (nelts) and the number
2955 of *target* min-addressable units that will be occupied in the
2956 object file (nunits). We cannot assume that 32 divides the
2957 mode's bitsize (size * BITS_PER_UNIT) evenly.
2958
2959 size * BITS_PER_UNIT is used here to make sure that padding bits
2960 (which might appear at either end of the value; real_to_target
2961 will include the padding bits in its output array) are included. */
2962
2963 nunits = GET_MODE_SIZE (mode);
2964 bitsize = nunits * BITS_PER_UNIT;
2965 nelts = CEIL (bitsize, 32);
2966 units_per = 32 / BITS_PER_UNIT;
2967
2968 real_to_target (data, &d, mode);
2969
2970 /* Put out the first word with the specified alignment. */
2971 unsigned int chunk_nunits = MIN (nunits, units_per);
2972 if (reverse)
2973 elt = flip_storage_order (SImode, gen_int_mode (data[nelts - 1], SImode));
2974 else
2975 elt = GEN_INT (sext_hwi (data[0], chunk_nunits * BITS_PER_UNIT));
2976 assemble_integer (elt, chunk_nunits, align, 1);
2977 nunits -= chunk_nunits;
2978
2979 /* Subsequent words need only 32-bit alignment. */
2980 align = min_align (align, 32);
2981
2982 for (int i = 1; i < nelts; i++)
2983 {
2984 chunk_nunits = MIN (nunits, units_per);
2985 if (reverse)
2986 elt = flip_storage_order (SImode,
2987 gen_int_mode (data[nelts - 1 - i], SImode));
2988 else
2989 elt = GEN_INT (sext_hwi (data[i], chunk_nunits * BITS_PER_UNIT));
2990 assemble_integer (elt, chunk_nunits, align, 1);
2991 nunits -= chunk_nunits;
2992 }
2993 }
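
/* Worked example for a 64-bit double (DFmode) on an 8-bit-byte target:
   nunits == 8, bitsize == 64, nelts == 2 and units_per == 4, so
   real_to_target fills data[0] and data[1] and the code emits two 32-bit
   chunks -- the first with the caller's alignment, the second with at
   most 32-bit alignment via min_align.  */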
2994 \f
2995 /* Given an expression EXP with a constant value,
2996 reduce it to the sum of an assembler symbol and an integer.
2997 Store them both in the structure *VALUE.
2998 EXP must be reducible. */
2999
3000 class addr_const {
3001 public:
3002 rtx base;
3003 poly_int64 offset;
3004 };
3005
3006 static void
3007 decode_addr_const (tree exp, class addr_const *value)
3008 {
3009 tree target = TREE_OPERAND (exp, 0);
3010 poly_int64 offset = 0;
3011 rtx x;
3012
3013 while (1)
3014 {
3015 poly_int64 bytepos;
3016 if (TREE_CODE (target) == COMPONENT_REF
3017 && poly_int_tree_p (byte_position (TREE_OPERAND (target, 1)),
3018 &bytepos))
3019 {
3020 offset += bytepos;
3021 target = TREE_OPERAND (target, 0);
3022 }
3023 else if (TREE_CODE (target) == ARRAY_REF
3024 || TREE_CODE (target) == ARRAY_RANGE_REF)
3025 {
3026 /* Truncate big offset. */
3027 offset
3028 += (TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (target)))
3029 * wi::to_poly_widest (TREE_OPERAND (target, 1)).force_shwi ());
3030 target = TREE_OPERAND (target, 0);
3031 }
3032 else if (TREE_CODE (target) == MEM_REF
3033 && TREE_CODE (TREE_OPERAND (target, 0)) == ADDR_EXPR)
3034 {
3035 offset += mem_ref_offset (target).force_shwi ();
3036 target = TREE_OPERAND (TREE_OPERAND (target, 0), 0);
3037 }
3038 else if (TREE_CODE (target) == INDIRECT_REF
3039 && TREE_CODE (TREE_OPERAND (target, 0)) == NOP_EXPR
3040 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (target, 0), 0))
3041 == ADDR_EXPR)
3042 target = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (target, 0), 0), 0);
3043 else
3044 break;
3045 }
3046
3047 switch (TREE_CODE (target))
3048 {
3049 case VAR_DECL:
3050 case FUNCTION_DECL:
3051 x = DECL_RTL (target);
3052 break;
3053
3054 case LABEL_DECL:
3055 x = gen_rtx_MEM (FUNCTION_MODE,
3056 gen_rtx_LABEL_REF (Pmode, force_label_rtx (target)));
3057 break;
3058
3059 case REAL_CST:
3060 case FIXED_CST:
3061 case STRING_CST:
3062 case COMPLEX_CST:
3063 case CONSTRUCTOR:
3064 case INTEGER_CST:
3065 x = lookup_constant_def (target);
3066 /* Should have been added by output_addressed_constants. */
3067 gcc_assert (x);
3068 break;
3069
3070 case INDIRECT_REF:
3071 /* This deals with absolute addresses. */
3072 offset += tree_to_shwi (TREE_OPERAND (target, 0));
3073 x = gen_rtx_MEM (QImode,
3074 gen_rtx_SYMBOL_REF (Pmode, "origin of addresses"));
3075 break;
3076
3077 case COMPOUND_LITERAL_EXPR:
3078 gcc_assert (COMPOUND_LITERAL_EXPR_DECL (target));
3079 x = DECL_RTL (COMPOUND_LITERAL_EXPR_DECL (target));
3080 break;
3081
3082 default:
3083 gcc_unreachable ();
3084 }
3085
3086 gcc_assert (MEM_P (x));
3087 x = XEXP (x, 0);
3088
3089 value->base = x;
3090 value->offset = offset;
3091 }
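
/* A small example of what this decoding produces, assuming a global
   "int arr[10]" with 4-byte ints: for the constant address &arr[3] the
   loop peels the ARRAY_REF, leaving TARGET == arr and OFFSET == 12, and
   the result is value->base == the SYMBOL_REF for "arr" with
   value->offset == 12.  */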
3092 \f
3093 static GTY(()) hash_table<tree_descriptor_hasher> *const_desc_htab;
3094
3095 static void maybe_output_constant_def_contents (struct constant_descriptor_tree *, int);
3096
3097 /* Constant pool accessor function. */
3098
3099 hash_table<tree_descriptor_hasher> *
3100 constant_pool_htab (void)
3101 {
3102 return const_desc_htab;
3103 }
3104
3105 /* Compute a hash code for a constant expression. */
3106
3107 hashval_t
3108 tree_descriptor_hasher::hash (constant_descriptor_tree *ptr)
3109 {
3110 return ptr->hash;
3111 }
3112
3113 static hashval_t
3114 const_hash_1 (const tree exp)
3115 {
3116 const char *p;
3117 hashval_t hi;
3118 int len, i;
3119 enum tree_code code = TREE_CODE (exp);
3120
3121 /* Either set P and LEN to the address and len of something to hash and
3122 exit the switch or return a value. */
3123
3124 switch (code)
3125 {
3126 case INTEGER_CST:
3127 p = (char *) &TREE_INT_CST_ELT (exp, 0);
3128 len = TREE_INT_CST_NUNITS (exp) * sizeof (HOST_WIDE_INT);
3129 break;
3130
3131 case REAL_CST:
3132 return real_hash (TREE_REAL_CST_PTR (exp));
3133
3134 case FIXED_CST:
3135 return fixed_hash (TREE_FIXED_CST_PTR (exp));
3136
3137 case STRING_CST:
3138 p = TREE_STRING_POINTER (exp);
3139 len = TREE_STRING_LENGTH (exp);
3140 break;
3141
3142 case COMPLEX_CST:
3143 return (const_hash_1 (TREE_REALPART (exp)) * 5
3144 + const_hash_1 (TREE_IMAGPART (exp)));
3145
3146 case VECTOR_CST:
3147 {
3148 hi = 7 + VECTOR_CST_NPATTERNS (exp);
3149 hi = hi * 563 + VECTOR_CST_NELTS_PER_PATTERN (exp);
3150 unsigned int count = vector_cst_encoded_nelts (exp);
3151 for (unsigned int i = 0; i < count; ++i)
3152 hi = hi * 563 + const_hash_1 (VECTOR_CST_ENCODED_ELT (exp, i));
3153 return hi;
3154 }
3155
3156 case CONSTRUCTOR:
3157 {
3158 unsigned HOST_WIDE_INT idx;
3159 tree value;
3160
3161 hi = 5 + int_size_in_bytes (TREE_TYPE (exp));
3162
3163 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
3164 if (value)
3165 hi = hi * 603 + const_hash_1 (value);
3166
3167 return hi;
3168 }
3169
3170 case ADDR_EXPR:
3171 if (CONSTANT_CLASS_P (TREE_OPERAND (exp, 0)))
3172 return const_hash_1 (TREE_OPERAND (exp, 0));
3173
3174 /* Fallthru. */
3175 case FDESC_EXPR:
3176 {
3177 class addr_const value;
3178
3179 decode_addr_const (exp, &value);
3180 switch (GET_CODE (value.base))
3181 {
3182 case SYMBOL_REF:
3183 /* Don't hash the address of the SYMBOL_REF;
3184 only use the offset and the symbol name. */
3185 hi = value.offset.coeffs[0];
3186 p = XSTR (value.base, 0);
3187 for (i = 0; p[i] != 0; i++)
3188 hi = ((hi * 613) + (unsigned) (p[i]));
3189 break;
3190
3191 case LABEL_REF:
3192 hi = (value.offset.coeffs[0]
3193 + CODE_LABEL_NUMBER (label_ref_label (value.base)) * 13);
3194 break;
3195
3196 default:
3197 gcc_unreachable ();
3198 }
3199 }
3200 return hi;
3201
3202 case PLUS_EXPR:
3203 case POINTER_PLUS_EXPR:
3204 case MINUS_EXPR:
3205 return (const_hash_1 (TREE_OPERAND (exp, 0)) * 9
3206 + const_hash_1 (TREE_OPERAND (exp, 1)));
3207
3208 CASE_CONVERT:
3209 return const_hash_1 (TREE_OPERAND (exp, 0)) * 7 + 2;
3210
3211 default:
3212 /* A language specific constant. Just hash the code. */
3213 return code;
3214 }
3215
3216 /* Compute hashing function. */
3217 hi = len;
3218 for (i = 0; i < len; i++)
3219 hi = ((hi * 613) + (unsigned) (p[i]));
3220
3221 return hi;
3222 }
3223
3224 /* Wrapper of compare_constant, for the htab interface. */
3225 bool
3226 tree_descriptor_hasher::equal (constant_descriptor_tree *c1,
3227 constant_descriptor_tree *c2)
3228 {
3229 if (c1->hash != c2->hash)
3230 return 0;
3231 return compare_constant (c1->value, c2->value);
3232 }
3233
3234 /* Compare t1 and t2, and return 1 only if they are known to result in
3235 the same bit pattern on output. */
3236
3237 static int
3238 compare_constant (const tree t1, const tree t2)
3239 {
3240 enum tree_code typecode;
3241
3242 if (t1 == NULL_TREE)
3243 return t2 == NULL_TREE;
3244 if (t2 == NULL_TREE)
3245 return 0;
3246
3247 if (TREE_CODE (t1) != TREE_CODE (t2))
3248 return 0;
3249
3250 switch (TREE_CODE (t1))
3251 {
3252 case INTEGER_CST:
3253 /* Integer constants are the same only if their types have the same width and mode. */
3254 if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
3255 return 0;
3256 if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2)))
3257 return 0;
3258 return tree_int_cst_equal (t1, t2);
3259
3260 case REAL_CST:
3261 /* Real constants are the same only if their types have the same width. In
3262 addition to the same width, we need to check whether the modes are the
3263 same. There might be two floating point modes that are the same size
3264 but have different representations, such as the PowerPC that has 2
3265 different 128-bit floating point types (IBM extended double and IEEE
3266 128-bit floating point). */
3267 if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
3268 return 0;
3269 if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2)))
3270 return 0;
3271 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
3272
3273 case FIXED_CST:
3274 /* Fixed constants are the same only if their types have the same width. */
3275 if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
3276 return 0;
3277
3278 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
3279
3280 case STRING_CST:
3281 if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2))
3282 || int_size_in_bytes (TREE_TYPE (t1))
3283 != int_size_in_bytes (TREE_TYPE (t2)))
3284 return 0;
3285
3286 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
3287 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
3288 TREE_STRING_LENGTH (t1)));
3289
3290 case COMPLEX_CST:
3291 return (compare_constant (TREE_REALPART (t1), TREE_REALPART (t2))
3292 && compare_constant (TREE_IMAGPART (t1), TREE_IMAGPART (t2)));
3293
3294 case VECTOR_CST:
3295 {
3296 if (VECTOR_CST_NPATTERNS (t1)
3297 != VECTOR_CST_NPATTERNS (t2))
3298 return 0;
3299
3300 if (VECTOR_CST_NELTS_PER_PATTERN (t1)
3301 != VECTOR_CST_NELTS_PER_PATTERN (t2))
3302 return 0;
3303
3304 unsigned int count = vector_cst_encoded_nelts (t1);
3305 for (unsigned int i = 0; i < count; ++i)
3306 if (!compare_constant (VECTOR_CST_ENCODED_ELT (t1, i),
3307 VECTOR_CST_ENCODED_ELT (t2, i)))
3308 return 0;
3309
3310 return 1;
3311 }
3312
3313 case CONSTRUCTOR:
3314 {
3315 vec<constructor_elt, va_gc> *v1, *v2;
3316 unsigned HOST_WIDE_INT idx;
3317
3318 typecode = TREE_CODE (TREE_TYPE (t1));
3319 if (typecode != TREE_CODE (TREE_TYPE (t2)))
3320 return 0;
3321
3322 if (typecode == ARRAY_TYPE)
3323 {
3324 HOST_WIDE_INT size_1 = int_size_in_bytes (TREE_TYPE (t1));
3325 /* For arrays, check that mode, size and storage order match. */
3326 if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2))
3327 || size_1 == -1
3328 || size_1 != int_size_in_bytes (TREE_TYPE (t2))
3329 || TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (t1))
3330 != TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (t2)))
3331 return 0;
3332 }
3333 else
3334 {
3335 /* For record and union constructors, require exact type
3336 equality. */
3337 if (TREE_TYPE (t1) != TREE_TYPE (t2))
3338 return 0;
3339 }
3340
3341 v1 = CONSTRUCTOR_ELTS (t1);
3342 v2 = CONSTRUCTOR_ELTS (t2);
3343 if (vec_safe_length (v1) != vec_safe_length (v2))
3344 return 0;
3345
3346 for (idx = 0; idx < vec_safe_length (v1); ++idx)
3347 {
3348 constructor_elt *c1 = &(*v1)[idx];
3349 constructor_elt *c2 = &(*v2)[idx];
3350
3351 /* Check that each value is the same... */
3352 if (!compare_constant (c1->value, c2->value))
3353 return 0;
3354 /* ... and that they apply to the same fields! */
3355 if (typecode == ARRAY_TYPE)
3356 {
3357 if (!compare_constant (c1->index, c2->index))
3358 return 0;
3359 }
3360 else
3361 {
3362 if (c1->index != c2->index)
3363 return 0;
3364 }
3365 }
3366
3367 return 1;
3368 }
3369
3370 case ADDR_EXPR:
3371 case FDESC_EXPR:
3372 {
3373 class addr_const value1, value2;
3374 enum rtx_code code;
3375 int ret;
3376
3377 decode_addr_const (t1, &value1);
3378 decode_addr_const (t2, &value2);
3379
3380 if (maybe_ne (value1.offset, value2.offset))
3381 return 0;
3382
3383 code = GET_CODE (value1.base);
3384 if (code != GET_CODE (value2.base))
3385 return 0;
3386
3387 switch (code)
3388 {
3389 case SYMBOL_REF:
3390 ret = (strcmp (XSTR (value1.base, 0), XSTR (value2.base, 0)) == 0);
3391 break;
3392
3393 case LABEL_REF:
3394 ret = (CODE_LABEL_NUMBER (label_ref_label (value1.base))
3395 == CODE_LABEL_NUMBER (label_ref_label (value2.base)));
3396 break;
3397
3398 default:
3399 gcc_unreachable ();
3400 }
3401 return ret;
3402 }
3403
3404 case PLUS_EXPR:
3405 case POINTER_PLUS_EXPR:
3406 case MINUS_EXPR:
3407 case RANGE_EXPR:
3408 return (compare_constant (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0))
3409 && compare_constant (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1)));
3410
3411 CASE_CONVERT:
3412 case VIEW_CONVERT_EXPR:
3413 return compare_constant (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
3414
3415 default:
3416 return 0;
3417 }
3418
3419 gcc_unreachable ();
3420 }
3421 \f
3422 /* Return the section into which constant EXP should be placed. */
3423
3424 static section *
3425 get_constant_section (tree exp, unsigned int align)
3426 {
3427 return targetm.asm_out.select_section (exp,
3428 compute_reloc_for_constant (exp),
3429 align);
3430 }
3431
3432 /* Return the size of constant EXP in bytes. */
3433
3434 static HOST_WIDE_INT
3435 get_constant_size (tree exp)
3436 {
3437 HOST_WIDE_INT size;
3438
3439 size = int_size_in_bytes (TREE_TYPE (exp));
3440 gcc_checking_assert (size >= 0);
3441 gcc_checking_assert (TREE_CODE (exp) != STRING_CST
3442 || size >= TREE_STRING_LENGTH (exp));
3443 return size;
3444 }
3445
3446 /* Subroutine of output_constant_def:
3447 No constant equal to EXP is known to have been output.
3448 Make a constant descriptor to enter EXP in the hash table.
3449 Assign the label number and construct RTL to refer to the
3450 constant's location in memory.
3451 Caller is responsible for updating the hash table. */
3452
3453 static struct constant_descriptor_tree *
3454 build_constant_desc (tree exp)
3455 {
3456 struct constant_descriptor_tree *desc;
3457 rtx symbol, rtl;
3458 char label[256];
3459 int labelno;
3460 tree decl;
3461
3462 desc = ggc_alloc<constant_descriptor_tree> ();
3463 desc->value = exp;
3464
3465 /* Create a string containing the label name, in LABEL. */
3466 labelno = const_labelno++;
3467 ASM_GENERATE_INTERNAL_LABEL (label, "LC", labelno);
3468
3469 /* Construct the VAR_DECL associated with the constant. */
3470 decl = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (label),
3471 TREE_TYPE (exp));
3472 DECL_ARTIFICIAL (decl) = 1;
3473 DECL_IGNORED_P (decl) = 1;
3474 TREE_READONLY (decl) = 1;
3475 TREE_STATIC (decl) = 1;
3476 TREE_ADDRESSABLE (decl) = 1;
3477 /* We don't set the RTL yet as this would cause varpool to assume that the
3478 variable is referenced. Moreover, it would just be dropped in LTO mode.
3479 Instead we set the flag that will be recognized in make_decl_rtl. */
3480 DECL_IN_CONSTANT_POOL (decl) = 1;
3481 DECL_INITIAL (decl) = desc->value;
3482 /* ??? targetm.constant_alignment hasn't been updated for vector types on
3483 most architectures so use DATA_ALIGNMENT as well, except for strings. */
3484 if (TREE_CODE (exp) == STRING_CST)
3485 SET_DECL_ALIGN (decl, targetm.constant_alignment (exp, DECL_ALIGN (decl)));
3486 else
3487 {
3488 align_variable (decl, 0);
3489 if (DECL_ALIGN (decl) < GET_MODE_ALIGNMENT (DECL_MODE (decl))
3490 && ((optab_handler (movmisalign_optab, DECL_MODE (decl))
3491 != CODE_FOR_nothing)
3492 || targetm.slow_unaligned_access (DECL_MODE (decl),
3493 DECL_ALIGN (decl))))
3494 SET_DECL_ALIGN (decl, GET_MODE_ALIGNMENT (DECL_MODE (decl)));
3495 }
3496
3497 /* Now construct the SYMBOL_REF and the MEM. */
3498 if (use_object_blocks_p ())
3499 {
3500 int align = (TREE_CODE (decl) == CONST_DECL
3501 || (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
3502 ? DECL_ALIGN (decl)
3503 : symtab_node::get (decl)->definition_alignment ());
3504 section *sect = get_constant_section (exp, align);
3505 symbol = create_block_symbol (ggc_strdup (label),
3506 get_block_for_section (sect), -1);
3507 }
3508 else
3509 symbol = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (label));
3510 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LOCAL;
3511 SET_SYMBOL_REF_DECL (symbol, decl);
3512 TREE_CONSTANT_POOL_ADDRESS_P (symbol) = 1;
3513
3514 rtl = gen_const_mem (TYPE_MODE (TREE_TYPE (exp)), symbol);
3515 set_mem_alias_set (rtl, 0);
3516
3517 /* Putting EXP into the literal pool might have imposed a different
3518 alignment which should be visible in the RTX as well. */
3519 set_mem_align (rtl, DECL_ALIGN (decl));
3520
3521 /* We cannot share RTX'es in pool entries.
3522 Mark this piece of RTL as required for unsharing. */
3523 RTX_FLAG (rtl, used) = 1;
3524
3525 /* Set flags or add text to the name to record information, such as
3526 that it is a local symbol. If the name is changed, the macro
3527 ASM_OUTPUT_LABELREF will have to know how to strip this
3528 information. This call might invalidate our local variable
3529 SYMBOL; we can't use it afterward. */
3530 targetm.encode_section_info (exp, rtl, true);
3531
3532 desc->rtl = rtl;
3533
3534 return desc;
3535 }
3536
3537 /* Subroutine of output_constant_def and tree_output_constant_def:
3538 Add a constant to the hash table that tracks which constants
3539 already have labels. */
3540
3541 static constant_descriptor_tree *
3542 add_constant_to_table (tree exp, int defer)
3543 {
3544 /* The hash table methods may call output_constant_def for addressed
3545 constants, so handle them first. */
3546 output_addressed_constants (exp, defer);
3547
3548 /* Sanity check to catch recursive insertion. */
3549 static bool inserting;
3550 gcc_assert (!inserting);
3551 inserting = true;
3552
3553 /* Look up EXP in the table of constant descriptors. If we didn't
3554 find it, create a new one. */
3555 struct constant_descriptor_tree key;
3556 key.value = exp;
3557 key.hash = const_hash_1 (exp);
3558 constant_descriptor_tree **loc
3559 = const_desc_htab->find_slot_with_hash (&key, key.hash, INSERT);
3560
3561 inserting = false;
3562
3563 struct constant_descriptor_tree *desc = *loc;
3564 if (!desc)
3565 {
3566 desc = build_constant_desc (exp);
3567 desc->hash = key.hash;
3568 *loc = desc;
3569 }
3570
3571 return desc;
3572 }
3573
3574 /* Return an rtx representing a reference to constant data in memory
3575 for the constant expression EXP.
3576
3577 If assembler code for such a constant has already been output,
3578 return an rtx to refer to it.
3579 Otherwise, output such a constant in memory
3580 and generate an rtx for it.
3581
3582 If DEFER is nonzero, this constant can be deferred and output only
3583 if referenced in the function after all optimizations.
3584
3585 `const_desc_table' records which constants already have label strings. */
3586
3587 rtx
3588 output_constant_def (tree exp, int defer)
3589 {
3590 struct constant_descriptor_tree *desc = add_constant_to_table (exp, defer);
3591 maybe_output_constant_def_contents (desc, defer);
3592 return desc->rtl;
3593 }
3594
3595 /* Subroutine of output_constant_def: Decide whether or not we need to
3596 output the constant DESC now, and if so, do it. */
3597 static void
3598 maybe_output_constant_def_contents (struct constant_descriptor_tree *desc,
3599 int defer)
3600 {
3601 rtx symbol = XEXP (desc->rtl, 0);
3602 tree exp = desc->value;
3603
3604 if (flag_syntax_only)
3605 return;
3606
3607 if (TREE_ASM_WRITTEN (exp))
3608 /* Already output; don't do it again. */
3609 return;
3610
3611 /* We can always defer constants as long as the context allows
3612 doing so. */
3613 if (defer)
3614 {
3615 /* Increment n_deferred_constants if it exists. It needs to be at
3616 least as large as the number of constants actually referred to
3617 by the function. If it's too small we'll stop looking too early
3618 and fail to emit constants; if it's too large the only cost is
3619 looking through the entire function when we could have stopped earlier. */
3620 if (cfun)
3621 n_deferred_constants++;
3622 return;
3623 }
3624
3625 output_constant_def_contents (symbol);
3626 }
3627
3628 /* Subroutine of output_constant_def_contents. Output the definition
3629 of constant EXP, which is pointed to by label LABEL. ALIGN is the
3630 constant's alignment in bits. */
3631
3632 static void
3633 assemble_constant_contents (tree exp, const char *label, unsigned int align,
3634 bool merge_strings)
3635 {
3636 HOST_WIDE_INT size;
3637
3638 size = get_constant_size (exp);
3639
3640 /* Do any machine/system dependent processing of the constant. */
3641 targetm.asm_out.declare_constant_name (asm_out_file, label, exp, size);
3642
3643 /* Output the value of EXP. */
3644 output_constant (exp, size, align, false, merge_strings);
3645
3646 targetm.asm_out.decl_end ();
3647 }
3648
3649 /* We must output the constant data referred to by SYMBOL; do so. */
3650
3651 static void
3652 output_constant_def_contents (rtx symbol)
3653 {
3654 tree decl = SYMBOL_REF_DECL (symbol);
3655 tree exp = DECL_INITIAL (decl);
3656 bool asan_protected = false;
3657
3658 /* Make sure any other constants whose addresses appear in EXP
3659 are assigned label numbers. */
3660 output_addressed_constants (exp, 0);
3661
3662 /* We are no longer deferring this constant. */
3663 TREE_ASM_WRITTEN (decl) = TREE_ASM_WRITTEN (exp) = 1;
3664
3665 if ((flag_sanitize & SANITIZE_ADDRESS)
3666 && TREE_CODE (exp) == STRING_CST
3667 && asan_protect_global (exp))
3668 {
3669 asan_protected = true;
3670 SET_DECL_ALIGN (decl, MAX (DECL_ALIGN (decl),
3671 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT));
3672 }
3673
3674 /* If the constant is part of an object block, make sure that the
3675 decl has been positioned within its block, but do not write out
3676 its definition yet. output_object_blocks will do that later. */
3677 if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol) && SYMBOL_REF_BLOCK (symbol))
3678 place_block_symbol (symbol);
3679 else
3680 {
3681 int align = (TREE_CODE (decl) == CONST_DECL
3682 || (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
3683 ? DECL_ALIGN (decl)
3684 : symtab_node::get (decl)->definition_alignment ());
3685 section *sect = get_constant_section (exp, align);
3686 switch_to_section (sect);
3687 if (align > BITS_PER_UNIT)
3688 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
3689 assemble_constant_contents (exp, XSTR (symbol, 0), align,
3690 (sect->common.flags & SECTION_MERGE)
3691 && (sect->common.flags & SECTION_STRINGS));
3692 if (asan_protected)
3693 {
3694 HOST_WIDE_INT size = get_constant_size (exp);
3695 assemble_zeros (asan_red_zone_size (size));
3696 }
3697 }
3698 }
3699
3700 /* Look up EXP in the table of constant descriptors. Return the rtl
3701 if it has been emitted, else null. */
3702
3703 rtx
3704 lookup_constant_def (tree exp)
3705 {
3706 struct constant_descriptor_tree key;
3707
3708 key.value = exp;
3709 key.hash = const_hash_1 (exp);
3710 constant_descriptor_tree *desc
3711 = const_desc_htab->find_with_hash (&key, key.hash);
3712
3713 return (desc ? desc->rtl : NULL_RTX);
3714 }
3715
3716 /* Return a tree representing a reference to constant data in memory
3717 for the constant expression EXP.
3718
3719 This is the counterpart of output_constant_def at the Tree level. */
3720
3721 tree
3722 tree_output_constant_def (tree exp)
3723 {
3724 struct constant_descriptor_tree *desc = add_constant_to_table (exp, 1);
3725 tree decl = SYMBOL_REF_DECL (XEXP (desc->rtl, 0));
3726 varpool_node::finalize_decl (decl);
3727 return decl;
3728 }
3729 \f
3730 class GTY((chain_next ("%h.next"), for_user)) constant_descriptor_rtx {
3731 public:
3732 class constant_descriptor_rtx *next;
3733 rtx mem;
3734 rtx sym;
3735 rtx constant;
3736 HOST_WIDE_INT offset;
3737 hashval_t hash;
3738 fixed_size_mode mode;
3739 unsigned int align;
3740 int labelno;
3741 int mark;
3742 };
3743
3744 struct const_rtx_desc_hasher : ggc_ptr_hash<constant_descriptor_rtx>
3745 {
3746 static hashval_t hash (constant_descriptor_rtx *);
3747 static bool equal (constant_descriptor_rtx *, constant_descriptor_rtx *);
3748 };
3749
3750 /* Used in the hash tables to avoid outputting the same constant
3751 twice. Unlike 'struct constant_descriptor_tree', RTX constants
3752 are output once per function, not once per file. */
3753 /* ??? Only a few targets need per-function constant pools. Most
3754 can use one per-file pool. Should add a targetm bit to tell the
3755 difference. */
3756
3757 struct GTY(()) rtx_constant_pool {
3758 /* Pointers to first and last constant in pool, as ordered by offset. */
3759 class constant_descriptor_rtx *first;
3760 class constant_descriptor_rtx *last;
3761
3762 /* Hash facility for making memory-constants from constant rtl-expressions.
3763 It is used on RISC machines where immediate integer arguments and
3764 constant addresses are restricted so that such constants must be stored
3765 in memory. */
3766 hash_table<const_rtx_desc_hasher> *const_rtx_htab;
3767
3768 /* Current offset in constant pool (does not include any
3769 machine-specific header). */
3770 HOST_WIDE_INT offset;
3771 };
3772
3773 /* Hash and compare functions for const_rtx_htab. */
3774
3775 hashval_t
3776 const_rtx_desc_hasher::hash (constant_descriptor_rtx *desc)
3777 {
3778 return desc->hash;
3779 }
3780
3781 bool
3782 const_rtx_desc_hasher::equal (constant_descriptor_rtx *x,
3783 constant_descriptor_rtx *y)
3784 {
3785 if (x->mode != y->mode)
3786 return 0;
3787 return rtx_equal_p (x->constant, y->constant);
3788 }
3789
3790 /* Hash one component of a constant. */
3791
3792 static hashval_t
3793 const_rtx_hash_1 (const_rtx x)
3794 {
3795 unsigned HOST_WIDE_INT hwi;
3796 machine_mode mode;
3797 enum rtx_code code;
3798 hashval_t h;
3799 int i;
3800
3801 code = GET_CODE (x);
3802 mode = GET_MODE (x);
3803 h = (hashval_t) code * 1048573 + mode;
3804
3805 switch (code)
3806 {
3807 case CONST_INT:
3808 hwi = INTVAL (x);
3809
3810 fold_hwi:
3811 {
3812 int shift = sizeof (hashval_t) * CHAR_BIT;
3813 const int n = sizeof (HOST_WIDE_INT) / sizeof (hashval_t);
3814
3815 h ^= (hashval_t) hwi;
3816 for (i = 1; i < n; ++i)
3817 {
3818 hwi >>= shift;
3819 h ^= (hashval_t) hwi;
3820 }
3821 }
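/* Illustrative note (not part of the original source): on a host where
   hashval_t is 32 bits and HOST_WIDE_INT is 64 bits, SHIFT is 32 and N is 2,
   so the block above XORs the low 32 bits of HWI into H and then, after one
   shift, XORs in the high 32 bits as well. */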
3822 break;
3823
3824 case CONST_WIDE_INT:
3825 hwi = 0;
3826 {
3827 for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
3828 hwi ^= CONST_WIDE_INT_ELT (x, i);
3829 goto fold_hwi;
3830 }
3831
3832 case CONST_DOUBLE:
3833 if (TARGET_SUPPORTS_WIDE_INT == 0 && mode == VOIDmode)
3834 {
3835 hwi = CONST_DOUBLE_LOW (x) ^ CONST_DOUBLE_HIGH (x);
3836 goto fold_hwi;
3837 }
3838 else
3839 h ^= real_hash (CONST_DOUBLE_REAL_VALUE (x));
3840 break;
3841
3842 case CONST_FIXED:
3843 h ^= fixed_hash (CONST_FIXED_VALUE (x));
3844 break;
3845
3846 case SYMBOL_REF:
3847 h ^= htab_hash_string (XSTR (x, 0));
3848 break;
3849
3850 case LABEL_REF:
3851 h = h * 251 + CODE_LABEL_NUMBER (label_ref_label (x));
3852 break;
3853
3854 case UNSPEC:
3855 case UNSPEC_VOLATILE:
3856 h = h * 251 + XINT (x, 1);
3857 break;
3858
3859 default:
3860 break;
3861 }
3862
3863 return h;
3864 }
3865
3866 /* Compute a hash value for X, which should be a constant. */
3867
3868 static hashval_t
3869 const_rtx_hash (rtx x)
3870 {
3871 hashval_t h = 0;
3872 subrtx_iterator::array_type array;
3873 FOR_EACH_SUBRTX (iter, array, x, ALL)
3874 h = h * 509 + const_rtx_hash_1 (*iter);
3875 return h;
3876 }
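/* Illustrative note (not part of the original source): FOR_EACH_SUBRTX above
   visits X itself and every sub-rtx. For (const (plus (symbol_ref "x")
   (const_int 4))) it folds in the component hashes of the CONST, the PLUS,
   the SYMBOL_REF and the CONST_INT, combining each one with the multiplier
   509. */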
3877
3878 \f
3879 /* Create and return a new rtx constant pool. */
3880
3881 static struct rtx_constant_pool *
3882 create_constant_pool (void)
3883 {
3884 struct rtx_constant_pool *pool;
3885
3886 pool = ggc_alloc<rtx_constant_pool> ();
3887 pool->const_rtx_htab = hash_table<const_rtx_desc_hasher>::create_ggc (31);
3888 pool->first = NULL;
3889 pool->last = NULL;
3890 pool->offset = 0;
3891 return pool;
3892 }
3893
3894 /* Initialize constant pool hashing for a new function. */
3895
3896 void
3897 init_varasm_status (void)
3898 {
3899 crtl->varasm.pool = create_constant_pool ();
3900 crtl->varasm.deferred_constants = 0;
3901 }
3902 \f
3903 /* Given a MINUS expression, simplify it if both sides
3904 include the same symbol. */
3905
3906 rtx
3907 simplify_subtraction (rtx x)
3908 {
3909 rtx r = simplify_rtx (x);
3910 return r ? r : x;
3911 }
3912 \f
3913 /* Given a constant rtx X, make (or find) a memory constant for its value
3914 and return a MEM rtx to refer to it in memory. IN_MODE is the mode
3915 of X. */
3916
3917 rtx
3918 force_const_mem (machine_mode in_mode, rtx x)
3919 {
3920 class constant_descriptor_rtx *desc, tmp;
3921 struct rtx_constant_pool *pool;
3922 char label[256];
3923 rtx def, symbol;
3924 hashval_t hash;
3925 unsigned int align;
3926 constant_descriptor_rtx **slot;
3927 fixed_size_mode mode;
3928
3929 /* We can't force variable-sized objects to memory. */
3930 if (!is_a <fixed_size_mode> (in_mode, &mode))
3931 return NULL_RTX;
3932
3933 /* If we're not allowed to drop X into the constant pool, don't. */
3934 if (targetm.cannot_force_const_mem (mode, x))
3935 return NULL_RTX;
3936
3937 /* Record that this function has used a constant pool entry. */
3938 crtl->uses_const_pool = 1;
3939
3940 /* Decide which pool to use. */
3941 pool = (targetm.use_blocks_for_constant_p (mode, x)
3942 ? shared_constant_pool
3943 : crtl->varasm.pool);
3944
3945 /* Lookup the value in the hashtable. */
3946 tmp.constant = x;
3947 tmp.mode = mode;
3948 hash = const_rtx_hash (x);
3949 slot = pool->const_rtx_htab->find_slot_with_hash (&tmp, hash, INSERT);
3950 desc = *slot;
3951
3952 /* If the constant was already present, return its memory. */
3953 if (desc)
3954 return copy_rtx (desc->mem);
3955
3956 /* Otherwise, create a new descriptor. */
3957 desc = ggc_alloc<constant_descriptor_rtx> ();
3958 *slot = desc;
3959
3960 /* Align the location counter as required by EXP's data type. */
3961 machine_mode align_mode = (mode == VOIDmode ? word_mode : mode);
3962 align = targetm.static_rtx_alignment (align_mode);
3963
3964 pool->offset += (align / BITS_PER_UNIT) - 1;
3965 pool->offset &= ~ ((align / BITS_PER_UNIT) - 1);
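/* Illustrative note (not part of the original source): the two statements
   above round POOL->OFFSET up to the next multiple of the alignment, which
   is assumed to be a power of two. For example, with an 8-byte (64-bit)
   alignment and a current offset of 10: 10 + 7 = 17, and 17 & ~7 = 16, so
   the new constant is placed at offset 16 within the pool. */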
3966
3967 desc->next = NULL;
3968 desc->constant = copy_rtx (tmp.constant);
3969 desc->offset = pool->offset;
3970 desc->hash = hash;
3971 desc->mode = mode;
3972 desc->align = align;
3973 desc->labelno = const_labelno;
3974 desc->mark = 0;
3975
3976 pool->offset += GET_MODE_SIZE (mode);
3977 if (pool->last)
3978 pool->last->next = desc;
3979 else
3980 pool->first = pool->last = desc;
3981 pool->last = desc;
3982
3983 /* Create a string containing the label name, in LABEL. */
3984 ASM_GENERATE_INTERNAL_LABEL (label, "LC", const_labelno);
3985 ++const_labelno;
3986
3987 /* Construct the SYMBOL_REF. Make sure to mark it as belonging to
3988 the constants pool. */
3989 if (use_object_blocks_p () && targetm.use_blocks_for_constant_p (mode, x))
3990 {
3991 section *sect = targetm.asm_out.select_rtx_section (mode, x, align);
3992 symbol = create_block_symbol (ggc_strdup (label),
3993 get_block_for_section (sect), -1);
3994 }
3995 else
3996 symbol = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (label));
3997 desc->sym = symbol;
3998 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LOCAL;
3999 CONSTANT_POOL_ADDRESS_P (symbol) = 1;
4000 SET_SYMBOL_REF_CONSTANT (symbol, desc);
4001
4002 /* Construct the MEM. */
4003 desc->mem = def = gen_const_mem (mode, symbol);
4004 set_mem_align (def, align);
4005
4006 /* If we're dropping a label to the constant pool, make sure we
4007 don't delete it. */
4008 if (GET_CODE (x) == LABEL_REF)
4009 LABEL_PRESERVE_P (XEXP (x, 0)) = 1;
4010
4011 return copy_rtx (def);
4012 }
4013 \f
4014 /* Given a constant pool SYMBOL_REF, return the corresponding constant. */
4015
4016 rtx
4017 get_pool_constant (const_rtx addr)
4018 {
4019 return SYMBOL_REF_CONSTANT (addr)->constant;
4020 }
4021
4022 /* Given a constant pool SYMBOL_REF, return the corresponding constant
4023 and whether it has been output or not. */
4024
4025 rtx
4026 get_pool_constant_mark (rtx addr, bool *pmarked)
4027 {
4028 class constant_descriptor_rtx *desc;
4029
4030 desc = SYMBOL_REF_CONSTANT (addr);
4031 *pmarked = (desc->mark != 0);
4032 return desc->constant;
4033 }
4034
4035 /* Similar, return the mode. */
4036
4037 fixed_size_mode
4038 get_pool_mode (const_rtx addr)
4039 {
4040 return SYMBOL_REF_CONSTANT (addr)->mode;
4041 }
4042
4043 /* Return TRUE if and only if the constant pool has no entries. Note
4044 that even entries we might end up choosing not to emit are counted
4045 here, so there is the potential for missed optimizations. */
4046
4047 bool
4048 constant_pool_empty_p (void)
4049 {
4050 return crtl->varasm.pool->first == NULL;
4051 }
4052 \f
4053 /* Worker function for output_constant_pool_1. Emit assembly for X
4054 in MODE with known alignment ALIGN. */
4055
4056 static void
4057 output_constant_pool_2 (fixed_size_mode mode, rtx x, unsigned int align)
4058 {
4059 switch (GET_MODE_CLASS (mode))
4060 {
4061 case MODE_FLOAT:
4062 case MODE_DECIMAL_FLOAT:
4063 {
4064 gcc_assert (CONST_DOUBLE_AS_FLOAT_P (x));
4065 assemble_real (*CONST_DOUBLE_REAL_VALUE (x),
4066 as_a <scalar_float_mode> (mode), align, false);
4067 break;
4068 }
4069
4070 case MODE_INT:
4071 case MODE_PARTIAL_INT:
4072 case MODE_FRACT:
4073 case MODE_UFRACT:
4074 case MODE_ACCUM:
4075 case MODE_UACCUM:
4076 assemble_integer (x, GET_MODE_SIZE (mode), align, 1);
4077 break;
4078
4079 case MODE_VECTOR_BOOL:
4080 {
4081 gcc_assert (GET_CODE (x) == CONST_VECTOR);
4082
4083 /* Pick the smallest integer mode that contains at least one
4084 whole element. Often this is byte_mode and contains more
4085 than one element. */
4086 unsigned int nelts = GET_MODE_NUNITS (mode);
4087 unsigned int elt_bits = GET_MODE_BITSIZE (mode) / nelts;
4088 unsigned int int_bits = MAX (elt_bits, BITS_PER_UNIT);
4089 scalar_int_mode int_mode = int_mode_for_size (int_bits, 0).require ();
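/* Illustrative sketch (not part of the original source), assuming a target
   where BITS_PER_UNIT is 8: for a 16-element boolean vector mode whose total
   size is 16 bits, ELT_BITS is 1, INT_BITS is 8 and INT_MODE is QImode, so
   ELTS_PER_INT below is 8 and the constant is emitted as two QImode
   integers, with element I + J contributing bit J of its byte when it is
   nonzero. */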
4090
4091 /* Build the constant up one integer at a time. */
4092 unsigned int elts_per_int = int_bits / elt_bits;
4093 for (unsigned int i = 0; i < nelts; i += elts_per_int)
4094 {
4095 unsigned HOST_WIDE_INT value = 0;
4096 unsigned int limit = MIN (nelts - i, elts_per_int);
4097 for (unsigned int j = 0; j < limit; ++j)
4098 if (INTVAL (CONST_VECTOR_ELT (x, i + j)) != 0)
4099 value |= 1 << (j * elt_bits);
4100 output_constant_pool_2 (int_mode, gen_int_mode (value, int_mode),
4101 i != 0 ? MIN (align, int_bits) : align);
4102 }
4103 break;
4104 }
4105 case MODE_VECTOR_FLOAT:
4106 case MODE_VECTOR_INT:
4107 case MODE_VECTOR_FRACT:
4108 case MODE_VECTOR_UFRACT:
4109 case MODE_VECTOR_ACCUM:
4110 case MODE_VECTOR_UACCUM:
4111 {
4112 int i, units;
4113 scalar_mode submode = GET_MODE_INNER (mode);
4114 unsigned int subalign = MIN (align, GET_MODE_BITSIZE (submode));
4115
4116 gcc_assert (GET_CODE (x) == CONST_VECTOR);
4117 units = GET_MODE_NUNITS (mode);
4118
4119 for (i = 0; i < units; i++)
4120 {
4121 rtx elt = CONST_VECTOR_ELT (x, i);
4122 output_constant_pool_2 (submode, elt, i ? subalign : align);
4123 }
4124 }
4125 break;
4126
4127 default:
4128 gcc_unreachable ();
4129 }
4130 }
4131
4132 /* Worker function for output_constant_pool. Emit constant DESC,
4133 giving it ALIGN bits of alignment. */
4134
4135 static void
4136 output_constant_pool_1 (class constant_descriptor_rtx *desc,
4137 unsigned int align)
4138 {
4139 rtx x, tmp;
4140
4141 x = desc->constant;
4142
4143 /* See if X is a LABEL_REF (or a CONST referring to a LABEL_REF)
4144 whose CODE_LABEL has been deleted. This can occur if a jump table
4145 is eliminated by optimization. If so, write a constant of zero
4146 instead. Note that this can also happen by turning the
4147 CODE_LABEL into a NOTE. */
4148 /* ??? This seems completely and utterly wrong. Certainly it's
4149 not true for NOTE_INSN_DELETED_LABEL, but I doubt that it works
4150 properly even with rtx_insn::deleted and friends. */
4151
4152 tmp = x;
4153 switch (GET_CODE (tmp))
4154 {
4155 case CONST:
4156 if (GET_CODE (XEXP (tmp, 0)) != PLUS
4157 || GET_CODE (XEXP (XEXP (tmp, 0), 0)) != LABEL_REF)
4158 break;
4159 tmp = XEXP (XEXP (tmp, 0), 0);
4160 /* FALLTHRU */
4161
4162 case LABEL_REF:
4163 {
4164 rtx_insn *insn = label_ref_label (tmp);
4165 gcc_assert (!insn->deleted ());
4166 gcc_assert (!NOTE_P (insn)
4167 || NOTE_KIND (insn) != NOTE_INSN_DELETED);
4168 break;
4169 }
4170
4171 default:
4172 break;
4173 }
4174
4175 #ifdef ASM_OUTPUT_SPECIAL_POOL_ENTRY
4176 ASM_OUTPUT_SPECIAL_POOL_ENTRY (asm_out_file, x, desc->mode,
4177 align, desc->labelno, done);
4178 #endif
4179
4180 assemble_align (align);
4181
4182 /* Output the label. */
4183 targetm.asm_out.internal_label (asm_out_file, "LC", desc->labelno);
4184
4185 /* Output the data.
4186 Pass the actual alignment value while emitting the string constant to asm
4187 code, as the function 'output_constant_pool_1' explicitly passes the
4188 alignment as 1, assuming that the data is already aligned, which prevents
4189 the generation of fix-up table entries. */
4190 output_constant_pool_2 (desc->mode, x, desc->align);
4191
4192 /* Make sure all constants in SECTION_MERGE and not SECTION_STRINGS
4193 sections have proper size. */
4194 if (align > GET_MODE_BITSIZE (desc->mode)
4195 && in_section
4196 && (in_section->common.flags & SECTION_MERGE))
4197 assemble_align (align);
4198
4199 #ifdef ASM_OUTPUT_SPECIAL_POOL_ENTRY
4200 done:
4201 #endif
4202 return;
4203 }
4204
4205 /* Recompute the offsets of entries in POOL, and the overall size of
4206 POOL. Do this after calling mark_constant_pool to ensure that we
4207 are computing the offset values for the pool which we will actually
4208 emit. */
4209
4210 static void
4211 recompute_pool_offsets (struct rtx_constant_pool *pool)
4212 {
4213 class constant_descriptor_rtx *desc;
4214 pool->offset = 0;
4215
4216 for (desc = pool->first; desc ; desc = desc->next)
4217 if (desc->mark)
4218 {
4219 /* Recalculate offset. */
4220 unsigned int align = desc->align;
4221 pool->offset += (align / BITS_PER_UNIT) - 1;
4222 pool->offset &= ~ ((align / BITS_PER_UNIT) - 1);
4223 desc->offset = pool->offset;
4224 pool->offset += GET_MODE_SIZE (desc->mode);
4225 }
4226 }
4227
4228 /* Mark all constants that are referenced by SYMBOL_REFs in X.
4229 Emit referenced deferred strings. */
4230
4231 static void
4232 mark_constants_in_pattern (rtx insn)
4233 {
4234 subrtx_iterator::array_type array;
4235 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
4236 {
4237 const_rtx x = *iter;
4238 if (GET_CODE (x) == SYMBOL_REF)
4239 {
4240 if (CONSTANT_POOL_ADDRESS_P (x))
4241 {
4242 class constant_descriptor_rtx *desc = SYMBOL_REF_CONSTANT (x);
4243 if (desc->mark == 0)
4244 {
4245 desc->mark = 1;
4246 iter.substitute (desc->constant);
4247 }
4248 }
4249 else if (TREE_CONSTANT_POOL_ADDRESS_P (x))
4250 {
4251 tree decl = SYMBOL_REF_DECL (x);
4252 if (!TREE_ASM_WRITTEN (DECL_INITIAL (decl)))
4253 {
4254 n_deferred_constants--;
4255 output_constant_def_contents (CONST_CAST_RTX (x));
4256 }
4257 }
4258 }
4259 }
4260 }
4261
4262 /* Look through appropriate parts of INSN, marking all entries in the
4263 constant pool which are actually being used. Entries that are only
4264 referenced by other constants are also marked as used. Emit
4265 deferred strings that are used. */
4266
4267 static void
4268 mark_constants (rtx_insn *insn)
4269 {
4270 if (!INSN_P (insn))
4271 return;
4272
4273 /* Insns may appear inside a SEQUENCE. Only check the patterns of
4274 insns, not any notes that may be attached. We don't want to mark
4275 a constant just because it happens to appear in a REG_EQUIV note. */
4276 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
4277 {
4278 int i, n = seq->len ();
4279 for (i = 0; i < n; ++i)
4280 {
4281 rtx subinsn = seq->element (i);
4282 if (INSN_P (subinsn))
4283 mark_constants_in_pattern (subinsn);
4284 }
4285 }
4286 else
4287 mark_constants_in_pattern (insn);
4288 }
4289
4290 /* Look through the instructions for this function, and mark all the
4291 entries in POOL which are actually being used. Emit deferred constants
4292 which have indeed been used. */
4293
4294 static void
4295 mark_constant_pool (void)
4296 {
4297 rtx_insn *insn;
4298
4299 if (!crtl->uses_const_pool && n_deferred_constants == 0)
4300 return;
4301
4302 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4303 mark_constants (insn);
4304 }
4305
4306 /* Write all the constants in POOL. */
4307
4308 static void
4309 output_constant_pool_contents (struct rtx_constant_pool *pool)
4310 {
4311 class constant_descriptor_rtx *desc;
4312
4313 for (desc = pool->first; desc ; desc = desc->next)
4314 if (desc->mark < 0)
4315 {
4316 #ifdef ASM_OUTPUT_DEF
4317 const char *name = XSTR (desc->sym, 0);
4318 char label[256];
4319 char buffer[256 + 32];
4320 const char *p;
4321
4322 ASM_GENERATE_INTERNAL_LABEL (label, "LC", ~desc->mark);
4323 p = label;
4324 if (desc->offset)
4325 {
4326 sprintf (buffer, "%s+%ld", p, (long) (desc->offset));
4327 p = buffer;
4328 }
4329 ASM_OUTPUT_DEF (asm_out_file, name, p);
4330 #else
4331 gcc_unreachable ();
4332 #endif
4333 }
4334 else if (desc->mark)
4335 {
4336 /* If the constant is part of an object_block, make sure that
4337 the constant has been positioned within its block, but do not
4338 write out its definition yet. output_object_blocks will do
4339 that later. */
4340 if (SYMBOL_REF_HAS_BLOCK_INFO_P (desc->sym)
4341 && SYMBOL_REF_BLOCK (desc->sym))
4342 place_block_symbol (desc->sym);
4343 else
4344 {
4345 switch_to_section (targetm.asm_out.select_rtx_section
4346 (desc->mode, desc->constant, desc->align));
4347 output_constant_pool_1 (desc, desc->align);
4348 }
4349 }
4350 }
4351
4352 struct constant_descriptor_rtx_data {
4353 constant_descriptor_rtx *desc;
4354 target_unit *bytes;
4355 unsigned short size;
4356 unsigned short offset;
4357 unsigned int hash;
4358 };
4359
4360 /* qsort callback to sort constant_descriptor_rtx_data * vector by
4361 decreasing size. */
4362
4363 static int
4364 constant_descriptor_rtx_data_cmp (const void *p1, const void *p2)
4365 {
4366 constant_descriptor_rtx_data *const data1
4367 = *(constant_descriptor_rtx_data * const *) p1;
4368 constant_descriptor_rtx_data *const data2
4369 = *(constant_descriptor_rtx_data * const *) p2;
4370 if (data1->size > data2->size)
4371 return -1;
4372 if (data1->size < data2->size)
4373 return 1;
4374 if (data1->hash < data2->hash)
4375 return -1;
4376 gcc_assert (data1->hash > data2->hash);
4377 return 1;
4378 }
4379
4380 struct const_rtx_data_hasher : nofree_ptr_hash<constant_descriptor_rtx_data>
4381 {
4382 static hashval_t hash (constant_descriptor_rtx_data *);
4383 static bool equal (constant_descriptor_rtx_data *,
4384 constant_descriptor_rtx_data *);
4385 };
4386
4387 /* Hash and compare functions for const_rtx_data_htab. */
4388
4389 hashval_t
4390 const_rtx_data_hasher::hash (constant_descriptor_rtx_data *data)
4391 {
4392 return data->hash;
4393 }
4394
4395 bool
4396 const_rtx_data_hasher::equal (constant_descriptor_rtx_data *x,
4397 constant_descriptor_rtx_data *y)
4398 {
4399 if (x->hash != y->hash || x->size != y->size)
4400 return 0;
4401 unsigned int align1 = x->desc->align;
4402 unsigned int align2 = y->desc->align;
4403 unsigned int offset1 = (x->offset * BITS_PER_UNIT) & (align1 - 1);
4404 unsigned int offset2 = (y->offset * BITS_PER_UNIT) & (align2 - 1);
4405 if (offset1)
4406 align1 = least_bit_hwi (offset1);
4407 if (offset2)
4408 align2 = least_bit_hwi (offset2);
4409 if (align2 > align1)
4410 return 0;
4411 if (memcmp (x->bytes, y->bytes, x->size * sizeof (target_unit)) != 0)
4412 return 0;
4413 return 1;
4414 }
4415
4416 /* Attempt to optimize constant pool POOL. If it contains both CONST_VECTOR
4417 constants and scalar constants with the values of CONST_VECTOR elements,
4418 try to alias the scalar constants with the CONST_VECTOR elements. */
4419
4420 static void
4421 optimize_constant_pool (struct rtx_constant_pool *pool)
4422 {
4423 auto_vec<target_unit, 128> buffer;
4424 auto_vec<constant_descriptor_rtx_data *, 128> vec;
4425 object_allocator<constant_descriptor_rtx_data>
4426 data_pool ("constant_descriptor_rtx_data_pool");
4427 int idx = 0;
4428 size_t size = 0;
4429 for (constant_descriptor_rtx *desc = pool->first; desc; desc = desc->next)
4430 if (desc->mark > 0
4431 && ! (SYMBOL_REF_HAS_BLOCK_INFO_P (desc->sym)
4432 && SYMBOL_REF_BLOCK (desc->sym)))
4433 {
4434 buffer.truncate (0);
4435 buffer.reserve (GET_MODE_SIZE (desc->mode));
4436 if (native_encode_rtx (desc->mode, desc->constant, buffer, 0,
4437 GET_MODE_SIZE (desc->mode)))
4438 {
4439 constant_descriptor_rtx_data *data = data_pool.allocate ();
4440 data->desc = desc;
4441 data->bytes = NULL;
4442 data->size = GET_MODE_SIZE (desc->mode);
4443 data->offset = 0;
4444 data->hash = idx++;
4445 size += data->size;
4446 vec.safe_push (data);
4447 }
4448 }
4449 if (idx)
4450 {
4451 vec.qsort (constant_descriptor_rtx_data_cmp);
4452 unsigned min_size = vec.last ()->size;
4453 target_unit *bytes = XNEWVEC (target_unit, size);
4454 unsigned int i;
4455 constant_descriptor_rtx_data *data;
4456 hash_table<const_rtx_data_hasher> * htab
4457 = new hash_table<const_rtx_data_hasher> (31);
4458 size = 0;
4459 FOR_EACH_VEC_ELT (vec, i, data)
4460 {
4461 buffer.truncate (0);
4462 native_encode_rtx (data->desc->mode, data->desc->constant,
4463 buffer, 0, data->size);
4464 memcpy (bytes + size, buffer.address (), data->size);
4465 data->bytes = bytes + size;
4466 data->hash = iterative_hash (data->bytes,
4467 data->size * sizeof (target_unit), 0);
4468 size += data->size;
4469 constant_descriptor_rtx_data **slot
4470 = htab->find_slot_with_hash (data, data->hash, INSERT);
4471 if (*slot)
4472 {
4473 data->desc->mark = ~(*slot)->desc->labelno;
4474 data->desc->offset = (*slot)->offset;
4475 }
4476 else
4477 {
4478 unsigned int sz = 1 << floor_log2 (data->size);
4479
4480 *slot = data;
4481 for (sz >>= 1; sz >= min_size; sz >>= 1)
4482 for (unsigned off = 0; off + sz <= data->size; off += sz)
4483 {
4484 constant_descriptor_rtx_data tmp;
4485 tmp.desc = data->desc;
4486 tmp.bytes = data->bytes + off;
4487 tmp.size = sz;
4488 tmp.offset = off;
4489 tmp.hash = iterative_hash (tmp.bytes,
4490 sz * sizeof (target_unit), 0);
4491 slot = htab->find_slot_with_hash (&tmp, tmp.hash, INSERT);
4492 if (*slot == NULL)
4493 {
4494 *slot = data_pool.allocate ();
4495 **slot = tmp;
4496 }
4497 }
4498 }
4499 }
4500 delete htab;
4501 XDELETE (bytes);
4502 }
4503 data_pool.release ();
4504 }
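/* Illustrative sketch of the effect of optimize_constant_pool (not part of
   the original source), assuming the byte patterns and alignments work out:
   if the pool contains a 16-byte V4SImode vector { 1, 2, 3, 4 } and a
   separate 4-byte SImode constant 3 whose encoding equals the bytes at
   offset 8 of the vector, the scalar's descriptor gets MARK set to the
   bitwise-not of the vector's label number and OFFSET set to 8, and
   output_constant_pool_contents later emits it via ASM_OUTPUT_DEF as an
   alias of the vector's label plus 8 instead of duplicating the data. */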
4505
4506 /* Mark all constants that are used in the current function, then write
4507 out the function's private constant pool. */
4508
4509 static void
4510 output_constant_pool (const char *fnname ATTRIBUTE_UNUSED,
4511 tree fndecl ATTRIBUTE_UNUSED)
4512 {
4513 struct rtx_constant_pool *pool = crtl->varasm.pool;
4514
4515 /* It is possible for gcc to call force_const_mem and then to later
4516 discard the instructions which refer to the constant. In such a
4517 case we do not need to output the constant. */
4518 mark_constant_pool ();
4519
4520 /* Having marked the constant pool entries we'll actually emit, we
4521 now need to rebuild the offset information, which may have become
4522 stale. */
4523 recompute_pool_offsets (pool);
4524
4525 #ifdef ASM_OUTPUT_POOL_PROLOGUE
4526 ASM_OUTPUT_POOL_PROLOGUE (asm_out_file, fnname, fndecl, pool->offset);
4527 #endif
4528
4529 output_constant_pool_contents (pool);
4530
4531 #ifdef ASM_OUTPUT_POOL_EPILOGUE
4532 ASM_OUTPUT_POOL_EPILOGUE (asm_out_file, fnname, fndecl, pool->offset);
4533 #endif
4534 }
4535 \f
4536 /* Write the contents of the shared constant pool. */
4537
4538 void
4539 output_shared_constant_pool (void)
4540 {
4541 if (optimize
4542 && TARGET_SUPPORTS_ALIASES)
4543 optimize_constant_pool (shared_constant_pool);
4544
4545 output_constant_pool_contents (shared_constant_pool);
4546 }
4547 \f
4548 /* Determine what kind of relocations EXP may need. */
4549
4550 int
4551 compute_reloc_for_constant (tree exp)
4552 {
4553 int reloc = 0, reloc2;
4554 tree tem;
4555
4556 switch (TREE_CODE (exp))
4557 {
4558 case ADDR_EXPR:
4559 case FDESC_EXPR:
4560 /* Go inside any operations that get_inner_reference can handle and see
4561 if what's inside is a constant: no need to do anything here for
4562 addresses of variables or functions. */
4563 for (tem = TREE_OPERAND (exp, 0); handled_component_p (tem);
4564 tem = TREE_OPERAND (tem, 0))
4565 ;
4566
4567 if (TREE_CODE (tem) == MEM_REF
4568 && TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR)
4569 {
4570 reloc = compute_reloc_for_constant (TREE_OPERAND (tem, 0));
4571 break;
4572 }
4573
4574 if (!targetm.binds_local_p (tem))
4575 reloc |= 2;
4576 else
4577 reloc |= 1;
4578 break;
4579
4580 case PLUS_EXPR:
4581 case POINTER_PLUS_EXPR:
4582 reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
4583 reloc |= compute_reloc_for_constant (TREE_OPERAND (exp, 1));
4584 break;
4585
4586 case MINUS_EXPR:
4587 reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
4588 reloc2 = compute_reloc_for_constant (TREE_OPERAND (exp, 1));
4589 /* The difference of two local labels is computable at link time. */
4590 if (reloc == 1 && reloc2 == 1)
4591 reloc = 0;
4592 else
4593 reloc |= reloc2;
4594 break;
4595
4596 CASE_CONVERT:
4597 case VIEW_CONVERT_EXPR:
4598 reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
4599 break;
4600
4601 case CONSTRUCTOR:
4602 {
4603 unsigned HOST_WIDE_INT idx;
4604 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, tem)
4605 if (tem != 0)
4606 reloc |= compute_reloc_for_constant (tem);
4607 }
4608 break;
4609
4610 default:
4611 break;
4612 }
4613 return reloc;
4614 }
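/* Illustrative note (not part of the original source): the value returned
   above is a bitmask, with 1 meaning the constant refers to a symbol that
   binds locally and 2 meaning it refers to one that does not. For example,
   an initializer { &local_static, &extern_var } yields 1 | 2 = 3, while the
   difference of two locally-binding labels yields 0 because it is
   computable at link time. */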
4615
4616 /* Find all the constants whose addresses are referenced inside of EXP,
4617 and make sure assembler code with a label has been output for each one.
4618 Indicate whether an ADDR_EXPR has been encountered. */
4619
4620 static void
4621 output_addressed_constants (tree exp, int defer)
4622 {
4623 tree tem;
4624
4625 switch (TREE_CODE (exp))
4626 {
4627 case ADDR_EXPR:
4628 case FDESC_EXPR:
4629 /* Go inside any operations that get_inner_reference can handle and see
4630 if what's inside is a constant: no need to do anything here for
4631 addresses of variables or functions. */
4632 for (tem = TREE_OPERAND (exp, 0); handled_component_p (tem);
4633 tem = TREE_OPERAND (tem, 0))
4634 ;
4635
4636 /* If we have an initialized CONST_DECL, retrieve the initializer. */
4637 if (TREE_CODE (tem) == CONST_DECL && DECL_INITIAL (tem))
4638 tem = DECL_INITIAL (tem);
4639
4640 if (CONSTANT_CLASS_P (tem) || TREE_CODE (tem) == CONSTRUCTOR)
4641 output_constant_def (tem, defer);
4642
4643 if (TREE_CODE (tem) == MEM_REF)
4644 output_addressed_constants (TREE_OPERAND (tem, 0), defer);
4645 break;
4646
4647 case PLUS_EXPR:
4648 case POINTER_PLUS_EXPR:
4649 case MINUS_EXPR:
4650 output_addressed_constants (TREE_OPERAND (exp, 1), defer);
4651 gcc_fallthrough ();
4652
4653 CASE_CONVERT:
4654 case VIEW_CONVERT_EXPR:
4655 output_addressed_constants (TREE_OPERAND (exp, 0), defer);
4656 break;
4657
4658 case CONSTRUCTOR:
4659 {
4660 unsigned HOST_WIDE_INT idx;
4661 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, tem)
4662 if (tem != 0)
4663 output_addressed_constants (tem, defer);
4664 }
4665 break;
4666
4667 default:
4668 break;
4669 }
4670 }
4671 \f
4672 /* Whether a constructor CTOR is a valid static constant initializer if all
4673 its elements are. This used to be internal to initializer_constant_valid_p
4674 and has been exposed to let other functions like categorize_ctor_elements
4675 evaluate the property while walking a constructor for other purposes. */
4676
4677 bool
4678 constructor_static_from_elts_p (const_tree ctor)
4679 {
4680 return (TREE_CONSTANT (ctor)
4681 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4682 || TREE_CODE (TREE_TYPE (ctor)) == RECORD_TYPE
4683 || TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE));
4684 }
4685
4686 static tree initializer_constant_valid_p_1 (tree value, tree endtype,
4687 tree *cache);
4688
4689 /* A subroutine of initializer_constant_valid_p. VALUE is a MINUS_EXPR,
4690 PLUS_EXPR or POINTER_PLUS_EXPR. This looks for cases of VALUE
4691 which are valid when ENDTYPE is an integer of any size; in
4692 particular, this does not accept a pointer minus a constant. This
4693 returns null_pointer_node if the VALUE is an absolute constant
4694 which can be used to initialize a static variable. Otherwise it
4695 returns NULL. */
4696
4697 static tree
4698 narrowing_initializer_constant_valid_p (tree value, tree endtype, tree *cache)
4699 {
4700 tree op0, op1;
4701
4702 if (!INTEGRAL_TYPE_P (endtype))
4703 return NULL_TREE;
4704
4705 op0 = TREE_OPERAND (value, 0);
4706 op1 = TREE_OPERAND (value, 1);
4707
4708 /* Like STRIP_NOPS except allow the operand mode to widen. This
4709 works around a feature of fold that simplifies (int)(p1 - p2) to
4710 ((int)p1 - (int)p2) under the theory that the narrower operation
4711 is cheaper. */
4712
4713 while (CONVERT_EXPR_P (op0)
4714 || TREE_CODE (op0) == NON_LVALUE_EXPR)
4715 {
4716 tree inner = TREE_OPERAND (op0, 0);
4717 if (inner == error_mark_node
4718 || ! INTEGRAL_MODE_P (TYPE_MODE (TREE_TYPE (inner)))
4719 || (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (op0)))
4720 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (inner)))))
4721 break;
4722 op0 = inner;
4723 }
4724
4725 while (CONVERT_EXPR_P (op1)
4726 || TREE_CODE (op1) == NON_LVALUE_EXPR)
4727 {
4728 tree inner = TREE_OPERAND (op1, 0);
4729 if (inner == error_mark_node
4730 || ! INTEGRAL_MODE_P (TYPE_MODE (TREE_TYPE (inner)))
4731 || (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (op1)))
4732 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (inner)))))
4733 break;
4734 op1 = inner;
4735 }
4736
4737 op0 = initializer_constant_valid_p_1 (op0, endtype, cache);
4738 if (!op0)
4739 return NULL_TREE;
4740
4741 op1 = initializer_constant_valid_p_1 (op1, endtype,
4742 cache ? cache + 2 : NULL);
4743 /* Both initializers must be known. */
4744 if (op1)
4745 {
4746 if (op0 == op1
4747 && (op0 == null_pointer_node
4748 || TREE_CODE (value) == MINUS_EXPR))
4749 return null_pointer_node;
4750
4751 /* Support differences between labels. */
4752 if (TREE_CODE (op0) == LABEL_DECL
4753 && TREE_CODE (op1) == LABEL_DECL)
4754 return null_pointer_node;
4755
4756 if (TREE_CODE (op0) == STRING_CST
4757 && TREE_CODE (op1) == STRING_CST
4758 && operand_equal_p (op0, op1, 1))
4759 return null_pointer_node;
4760 }
4761
4762 return NULL_TREE;
4763 }
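/* Illustrative sketch (not part of the original source), assuming a target
   where int is narrower than a pointer:

     static struct S { char a, b; } s;
     static int d = (int) ((char *) &s.b - (char *) &s);

   fold turns the narrowing conversion of the difference into a MINUS_EXPR
   of two converted ADDR_EXPRs. The loops above strip those conversions,
   both operands then resolve to the same base object S, and because the
   code is MINUS_EXPR the function returns null_pointer_node: the
   initializer is an absolute constant. */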
4764
4765 /* Helper function of initializer_constant_valid_p.
4766 Return nonzero if VALUE is a valid constant-valued expression
4767 for use in initializing a static variable; one that can be an
4768 element of a "constant" initializer.
4769
4770 Return null_pointer_node if the value is absolute;
4771 if it is relocatable, return the variable that determines the relocation.
4772 We assume that VALUE has been folded as much as possible;
4773 therefore, we do not need to check for such things as
4774 arithmetic-combinations of integers.
4775
4776 Use CACHE (pointer to 2 tree values) for caching if non-NULL. */
4777
4778 static tree
4779 initializer_constant_valid_p_1 (tree value, tree endtype, tree *cache)
4780 {
4781 tree ret;
4782
4783 switch (TREE_CODE (value))
4784 {
4785 case CONSTRUCTOR:
4786 if (constructor_static_from_elts_p (value))
4787 {
4788 unsigned HOST_WIDE_INT idx;
4789 tree elt;
4790 bool absolute = true;
4791
4792 if (cache && cache[0] == value)
4793 return cache[1];
4794 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (value), idx, elt)
4795 {
4796 tree reloc;
4797 reloc = initializer_constant_valid_p_1 (elt, TREE_TYPE (elt),
4798 NULL);
4799 if (!reloc
4800 /* An absolute value is required with reverse SSO. */
4801 || (reloc != null_pointer_node
4802 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (value))
4803 && !AGGREGATE_TYPE_P (TREE_TYPE (elt))))
4804 {
4805 if (cache)
4806 {
4807 cache[0] = value;
4808 cache[1] = NULL_TREE;
4809 }
4810 return NULL_TREE;
4811 }
4812 if (reloc != null_pointer_node)
4813 absolute = false;
4814 }
4815 /* For a non-absolute relocation, there is no single
4816 variable that can be "the variable that determines the
4817 relocation." */
4818 if (cache)
4819 {
4820 cache[0] = value;
4821 cache[1] = absolute ? null_pointer_node : error_mark_node;
4822 }
4823 return absolute ? null_pointer_node : error_mark_node;
4824 }
4825
4826 return TREE_STATIC (value) ? null_pointer_node : NULL_TREE;
4827
4828 case INTEGER_CST:
4829 case VECTOR_CST:
4830 case REAL_CST:
4831 case FIXED_CST:
4832 case STRING_CST:
4833 case COMPLEX_CST:
4834 return null_pointer_node;
4835
4836 case ADDR_EXPR:
4837 case FDESC_EXPR:
4838 {
4839 tree op0 = staticp (TREE_OPERAND (value, 0));
4840 if (op0)
4841 {
4842 /* "&(*a).f" is like unto pointer arithmetic. If "a" turns out
4843 to be a constant, this is old-skool offsetof-like nonsense. */
4844 if (TREE_CODE (op0) == INDIRECT_REF
4845 && TREE_CONSTANT (TREE_OPERAND (op0, 0)))
4846 return null_pointer_node;
4847 /* Taking the address of a nested function involves a trampoline,
4848 unless we don't need or want one. */
4849 if (TREE_CODE (op0) == FUNCTION_DECL
4850 && DECL_STATIC_CHAIN (op0)
4851 && !TREE_NO_TRAMPOLINE (value))
4852 return NULL_TREE;
4853 /* "&{...}" requires a temporary to hold the constructed
4854 object. */
4855 if (TREE_CODE (op0) == CONSTRUCTOR)
4856 return NULL_TREE;
4857 }
4858 return op0;
4859 }
4860
4861 case NON_LVALUE_EXPR:
4862 return initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
4863 endtype, cache);
4864
4865 case VIEW_CONVERT_EXPR:
4866 {
4867 tree src = TREE_OPERAND (value, 0);
4868 tree src_type = TREE_TYPE (src);
4869 tree dest_type = TREE_TYPE (value);
4870
4871 /* Allow view-conversions from aggregate to non-aggregate type only
4872 if the bit pattern is fully preserved afterwards; otherwise, the
4873 RTL expander won't be able to apply a subsequent transformation
4874 to the underlying constructor. */
4875 if (AGGREGATE_TYPE_P (src_type) && !AGGREGATE_TYPE_P (dest_type))
4876 {
4877 if (TYPE_MODE (endtype) == TYPE_MODE (dest_type))
4878 return initializer_constant_valid_p_1 (src, endtype, cache);
4879 else
4880 return NULL_TREE;
4881 }
4882
4883 /* Allow all other kinds of view-conversion. */
4884 return initializer_constant_valid_p_1 (src, endtype, cache);
4885 }
4886
4887 CASE_CONVERT:
4888 {
4889 tree src = TREE_OPERAND (value, 0);
4890 tree src_type = TREE_TYPE (src);
4891 tree dest_type = TREE_TYPE (value);
4892
4893 /* Allow conversions between pointer types, floating-point
4894 types, and offset types. */
4895 if ((POINTER_TYPE_P (dest_type) && POINTER_TYPE_P (src_type))
4896 || (FLOAT_TYPE_P (dest_type) && FLOAT_TYPE_P (src_type))
4897 || (TREE_CODE (dest_type) == OFFSET_TYPE
4898 && TREE_CODE (src_type) == OFFSET_TYPE))
4899 return initializer_constant_valid_p_1 (src, endtype, cache);
4900
4901 /* Allow length-preserving conversions between integer types. */
4902 if (INTEGRAL_TYPE_P (dest_type) && INTEGRAL_TYPE_P (src_type)
4903 && (TYPE_PRECISION (dest_type) == TYPE_PRECISION (src_type)))
4904 return initializer_constant_valid_p_1 (src, endtype, cache);
4905
4906 /* Allow conversions between other integer types only if the
4907 value is explicit. Don't allow sign-extension to a type larger
4908 than both the word and pointer size; there are no relocations
4909 that would allow sign-extending it to a wider type. */
4910 if (INTEGRAL_TYPE_P (dest_type)
4911 && INTEGRAL_TYPE_P (src_type)
4912 && (TYPE_UNSIGNED (src_type)
4913 || TYPE_PRECISION (dest_type) <= TYPE_PRECISION (src_type)
4914 || TYPE_PRECISION (dest_type) <= BITS_PER_WORD
4915 || TYPE_PRECISION (dest_type) <= POINTER_SIZE))
4916 {
4917 tree inner = initializer_constant_valid_p_1 (src, endtype, cache);
4918 if (inner == null_pointer_node)
4919 return null_pointer_node;
4920 break;
4921 }
4922
4923 /* Allow (int) &foo provided int is as wide as a pointer. */
4924 if (INTEGRAL_TYPE_P (dest_type) && POINTER_TYPE_P (src_type)
4925 && (TYPE_PRECISION (dest_type) >= TYPE_PRECISION (src_type)))
4926 return initializer_constant_valid_p_1 (src, endtype, cache);
4927
4928 /* Likewise conversions from int to pointers, but also allow
4929 conversions from 0. */
4930 if ((POINTER_TYPE_P (dest_type)
4931 || TREE_CODE (dest_type) == OFFSET_TYPE)
4932 && INTEGRAL_TYPE_P (src_type))
4933 {
4934 if (TREE_CODE (src) == INTEGER_CST
4935 && TYPE_PRECISION (dest_type) >= TYPE_PRECISION (src_type))
4936 return null_pointer_node;
4937 if (integer_zerop (src))
4938 return null_pointer_node;
4939 else if (TYPE_PRECISION (dest_type) <= TYPE_PRECISION (src_type))
4940 return initializer_constant_valid_p_1 (src, endtype, cache);
4941 }
4942
4943 /* Allow conversions to struct or union types if the value
4944 inside is okay. */
4945 if (TREE_CODE (dest_type) == RECORD_TYPE
4946 || TREE_CODE (dest_type) == UNION_TYPE)
4947 return initializer_constant_valid_p_1 (src, endtype, cache);
4948 }
4949 break;
4950
4951 case POINTER_PLUS_EXPR:
4952 case PLUS_EXPR:
4953 /* Any valid floating-point constants will have been folded by now;
4954 with -frounding-math we hit this with addition of two constants. */
4955 if (TREE_CODE (endtype) == REAL_TYPE)
4956 return NULL_TREE;
4957 if (cache && cache[0] == value)
4958 return cache[1];
4959 if (! INTEGRAL_TYPE_P (endtype)
4960 || TYPE_PRECISION (endtype) >= TYPE_PRECISION (TREE_TYPE (value)))
4961 {
4962 tree ncache[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
4963 tree valid0
4964 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
4965 endtype, ncache);
4966 tree valid1
4967 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 1),
4968 endtype, ncache + 2);
4969 /* If either term is absolute, use the other term's relocation. */
4970 if (valid0 == null_pointer_node)
4971 ret = valid1;
4972 else if (valid1 == null_pointer_node)
4973 ret = valid0;
4974 /* Support narrowing pointer differences. */
4975 else
4976 ret = narrowing_initializer_constant_valid_p (value, endtype,
4977 ncache);
4978 }
4979 else
4980 /* Support narrowing pointer differences. */
4981 ret = narrowing_initializer_constant_valid_p (value, endtype, NULL);
4982 if (cache)
4983 {
4984 cache[0] = value;
4985 cache[1] = ret;
4986 }
4987 return ret;
4988
4989 case POINTER_DIFF_EXPR:
4990 case MINUS_EXPR:
4991 if (TREE_CODE (endtype) == REAL_TYPE)
4992 return NULL_TREE;
4993 if (cache && cache[0] == value)
4994 return cache[1];
4995 if (! INTEGRAL_TYPE_P (endtype)
4996 || TYPE_PRECISION (endtype) >= TYPE_PRECISION (TREE_TYPE (value)))
4997 {
4998 tree ncache[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
4999 tree valid0
5000 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
5001 endtype, ncache);
5002 tree valid1
5003 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 1),
5004 endtype, ncache + 2);
5005 /* Win if second argument is absolute. */
5006 if (valid1 == null_pointer_node)
5007 ret = valid0;
5008 /* Win if both arguments have the same relocation.
5009 Then the value is absolute. */
5010 else if (valid0 == valid1 && valid0 != 0)
5011 ret = null_pointer_node;
5012 /* Since GCC guarantees that string constants are unique in the
5013 generated code, a subtraction between two copies of the same
5014 constant string is absolute. */
5015 else if (valid0 && TREE_CODE (valid0) == STRING_CST
5016 && valid1 && TREE_CODE (valid1) == STRING_CST
5017 && operand_equal_p (valid0, valid1, 1))
5018 ret = null_pointer_node;
5019 /* Support narrowing differences. */
5020 else
5021 ret = narrowing_initializer_constant_valid_p (value, endtype,
5022 ncache);
5023 }
5024 else
5025 /* Support narrowing differences. */
5026 ret = narrowing_initializer_constant_valid_p (value, endtype, NULL);
5027 if (cache)
5028 {
5029 cache[0] = value;
5030 cache[1] = ret;
5031 }
5032 return ret;
5033
5034 default:
5035 break;
5036 }
5037
5038 return NULL_TREE;
5039 }
5040
5041 /* Return nonzero if VALUE is a valid constant-valued expression
5042 for use in initializing a static variable; one that can be an
5043 element of a "constant" initializer.
5044
5045 Return null_pointer_node if the value is absolute;
5046 if it is relocatable, return the variable that determines the relocation.
5047 We assume that VALUE has been folded as much as possible;
5048 therefore, we do not need to check for such things as
5049 arithmetic-combinations of integers. */
5050 tree
5051 initializer_constant_valid_p (tree value, tree endtype, bool reverse)
5052 {
5053 tree reloc = initializer_constant_valid_p_1 (value, endtype, NULL);
5054
5055 /* An absolute value is required with reverse storage order. */
5056 if (reloc
5057 && reloc != null_pointer_node
5058 && reverse
5059 && !AGGREGATE_TYPE_P (endtype)
5060 && !VECTOR_TYPE_P (endtype))
5061 reloc = NULL_TREE;
5062
5063 return reloc;
5064 }
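/* Illustrative examples (not part of the original source): for a file-scope
   "int x;", the initializer in "static int i = 42;" is absolute and yields
   null_pointer_node; "static int *p = &x;" is relocatable and yields X, the
   variable that determines the relocation; and "static int j = (int) &x;"
   yields NULL_TREE on a target where int is narrower than a pointer, so it
   is not a valid static initializer there. */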
5065 \f
5066 /* Return true if VALUE is a valid constant-valued expression
5067 for use in initializing a static bit-field; one that can be
5068 an element of a "constant" initializer. */
5069
5070 bool
5071 initializer_constant_valid_for_bitfield_p (tree value)
5072 {
5073 /* For bitfields we support integer constants or possibly nested aggregates
5074 of such. */
5075 switch (TREE_CODE (value))
5076 {
5077 case CONSTRUCTOR:
5078 {
5079 unsigned HOST_WIDE_INT idx;
5080 tree elt;
5081
5082 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (value), idx, elt)
5083 if (!initializer_constant_valid_for_bitfield_p (elt))
5084 return false;
5085 return true;
5086 }
5087
5088 case INTEGER_CST:
5089 case REAL_CST:
5090 return true;
5091
5092 case VIEW_CONVERT_EXPR:
5093 case NON_LVALUE_EXPR:
5094 return
5095 initializer_constant_valid_for_bitfield_p (TREE_OPERAND (value, 0));
5096
5097 default:
5098 break;
5099 }
5100
5101 return false;
5102 }
5103
5104 /* Check if a STRING_CST fits into the field.
5105 Tolerate only the case when the NUL termination
5106 does not fit into the field. */
5107
5108 static bool
5109 check_string_literal (tree string, unsigned HOST_WIDE_INT size)
5110 {
5111 tree type = TREE_TYPE (string);
5112 tree eltype = TREE_TYPE (type);
5113 unsigned HOST_WIDE_INT elts = tree_to_uhwi (TYPE_SIZE_UNIT (eltype));
5114 unsigned HOST_WIDE_INT mem_size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
5115 int len = TREE_STRING_LENGTH (string);
5116
5117 if (elts != 1 && elts != 2 && elts != 4)
5118 return false;
5119 if (len < 0 || len % elts != 0)
5120 return false;
5121 if (size < (unsigned)len)
5122 return false;
5123 if (mem_size != size)
5124 return false;
5125 return true;
5126 }
5127
5128 /* Outer state of output_constructor that is relevant in recursive calls,
5129 typically for nested aggregate bitfields. */
5130
5131 struct oc_outer_state {
5132 unsigned int bit_offset; /* current position in ... */
5133 int byte; /* ... the outer byte buffer. */
5134 };
5135
5136 static unsigned HOST_WIDE_INT
5137 output_constructor (tree, unsigned HOST_WIDE_INT, unsigned int, bool,
5138 oc_outer_state *);
5139
5140 /* Output assembler code for constant EXP, with no label.
5141 This includes the pseudo-op such as ".int" or ".byte", and a newline.
5142 Assumes output_addressed_constants has been done on EXP already.
5143
5144 Generate at least SIZE bytes of assembler data, padding at the end
5145 with zeros if necessary. SIZE must always be specified. The returned
5146 value is the actual number of bytes of assembler data generated, which
5147 may be bigger than SIZE if the object contains a variable length field.
5148
5149 SIZE is important for structure constructors,
5150 since trailing members may have been omitted from the constructor.
5151 It is also important for initialization of arrays from string constants
5152 since the full length of the string constant might not be wanted.
5153 It is also needed for initialization of unions, where the initializer's
5154 type is just one member, and that may not be as long as the union.
5155
5156 There is a case in which we would fail to output exactly SIZE bytes:
5157 for a structure constructor that wants to produce more than SIZE bytes.
5158 But such constructors will never be generated for any possible input.
5159
5160 ALIGN is the alignment of the data in bits.
5161
5162 If REVERSE is true, EXP is output in reverse storage order. */
5163
5164 static unsigned HOST_WIDE_INT
5165 output_constant (tree exp, unsigned HOST_WIDE_INT size, unsigned int align,
5166 bool reverse, bool merge_strings)
5167 {
5168 enum tree_code code;
5169 unsigned HOST_WIDE_INT thissize;
5170 rtx cst;
5171
5172 if (size == 0 || flag_syntax_only)
5173 return size;
5174
5175 /* See if we're trying to initialize a pointer in a non-default mode
5176 to the address of some declaration somewhere. If the target says
5177 the mode is valid for pointers, assume the target has a way of
5178 resolving it. */
5179 if (TREE_CODE (exp) == NOP_EXPR
5180 && POINTER_TYPE_P (TREE_TYPE (exp))
5181 && targetm.addr_space.valid_pointer_mode
5182 (SCALAR_INT_TYPE_MODE (TREE_TYPE (exp)),
5183 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)))))
5184 {
5185 tree saved_type = TREE_TYPE (exp);
5186
5187 /* Peel off any intermediate conversions-to-pointer for valid
5188 pointer modes. */
5189 while (TREE_CODE (exp) == NOP_EXPR
5190 && POINTER_TYPE_P (TREE_TYPE (exp))
5191 && targetm.addr_space.valid_pointer_mode
5192 (SCALAR_INT_TYPE_MODE (TREE_TYPE (exp)),
5193 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)))))
5194 exp = TREE_OPERAND (exp, 0);
5195
5196 /* If what we're left with is the address of something, we can
5197 convert the address to the final type and output it that
5198 way. */
5199 if (TREE_CODE (exp) == ADDR_EXPR)
5200 exp = build1 (ADDR_EXPR, saved_type, TREE_OPERAND (exp, 0));
5201 /* Likewise for constant ints. */
5202 else if (TREE_CODE (exp) == INTEGER_CST)
5203 exp = fold_convert (saved_type, exp);
5204
5205 }
5206
5207 /* Eliminate any conversions since we'll be outputting the underlying
5208 constant. */
5209 while (CONVERT_EXPR_P (exp)
5210 || TREE_CODE (exp) == NON_LVALUE_EXPR
5211 || TREE_CODE (exp) == VIEW_CONVERT_EXPR)
5212 {
5213 HOST_WIDE_INT type_size = int_size_in_bytes (TREE_TYPE (exp));
5214 HOST_WIDE_INT op_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0)));
5215
5216 /* Make sure eliminating the conversion is really a no-op, except with
5217 VIEW_CONVERT_EXPRs to allow for wild Ada unchecked conversions and
5218 union types to allow for Ada unchecked unions. */
5219 if (type_size > op_size
5220 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5221 && TREE_CODE (TREE_TYPE (exp)) != UNION_TYPE)
5222 /* Keep the conversion. */
5223 break;
5224 else
5225 exp = TREE_OPERAND (exp, 0);
5226 }
5227
5228 code = TREE_CODE (TREE_TYPE (exp));
5229 thissize = int_size_in_bytes (TREE_TYPE (exp));
5230
5231 /* Allow a constructor with no elements for any data type.
5232 This means to fill the space with zeros. */
5233 if (TREE_CODE (exp) == CONSTRUCTOR
5234 && vec_safe_is_empty (CONSTRUCTOR_ELTS (exp)))
5235 {
5236 assemble_zeros (size);
5237 return size;
5238 }
5239
5240 if (TREE_CODE (exp) == FDESC_EXPR)
5241 {
5242 #ifdef ASM_OUTPUT_FDESC
5243 HOST_WIDE_INT part = tree_to_shwi (TREE_OPERAND (exp, 1));
5244 tree decl = TREE_OPERAND (exp, 0);
5245 ASM_OUTPUT_FDESC (asm_out_file, decl, part);
5246 #else
5247 gcc_unreachable ();
5248 #endif
5249 return size;
5250 }
5251
5252 /* Now output the underlying data. If we've handled the padding, return.
5253 Otherwise, break and ensure SIZE is the size written. */
5254 switch (code)
5255 {
5256 case BOOLEAN_TYPE:
5257 case INTEGER_TYPE:
5258 case ENUMERAL_TYPE:
5259 case POINTER_TYPE:
5260 case REFERENCE_TYPE:
5261 case OFFSET_TYPE:
5262 case FIXED_POINT_TYPE:
5263 case NULLPTR_TYPE:
5264 cst = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
5265 if (reverse)
5266 cst = flip_storage_order (TYPE_MODE (TREE_TYPE (exp)), cst);
5267 if (!assemble_integer (cst, MIN (size, thissize), align, 0))
5268 error ("initializer for integer/fixed-point value is too complicated");
5269 break;
5270
5271 case REAL_TYPE:
5272 if (TREE_CODE (exp) != REAL_CST)
5273 error ("initializer for floating value is not a floating constant");
5274 else
5275 assemble_real (TREE_REAL_CST (exp),
5276 SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (exp)),
5277 align, reverse);
5278 break;
5279
5280 case COMPLEX_TYPE:
5281 output_constant (TREE_REALPART (exp), thissize / 2, align,
5282 reverse, false);
5283 output_constant (TREE_IMAGPART (exp), thissize / 2,
5284 min_align (align, BITS_PER_UNIT * (thissize / 2)),
5285 reverse, false);
5286 break;
5287
5288 case ARRAY_TYPE:
5289 case VECTOR_TYPE:
5290 switch (TREE_CODE (exp))
5291 {
5292 case CONSTRUCTOR:
5293 return output_constructor (exp, size, align, reverse, NULL);
5294 case STRING_CST:
5295 thissize = (unsigned HOST_WIDE_INT)TREE_STRING_LENGTH (exp);
5296 if (merge_strings
5297 && (thissize == 0
5298 || TREE_STRING_POINTER (exp) [thissize - 1] != '\0'))
5299 thissize++;
5300 gcc_checking_assert (check_string_literal (exp, size));
5301 assemble_string (TREE_STRING_POINTER (exp), thissize);
5302 break;
5303 case VECTOR_CST:
5304 {
5305 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5306 unsigned int nalign = MIN (align, GET_MODE_ALIGNMENT (inner));
5307 int elt_size = GET_MODE_SIZE (inner);
5308 output_constant (VECTOR_CST_ELT (exp, 0), elt_size, align,
5309 reverse, false);
5310 thissize = elt_size;
5311 /* Static constants must have a fixed size. */
5312 unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
5313 for (unsigned int i = 1; i < nunits; i++)
5314 {
5315 output_constant (VECTOR_CST_ELT (exp, i), elt_size, nalign,
5316 reverse, false);
5317 thissize += elt_size;
5318 }
5319 break;
5320 }
5321 default:
5322 gcc_unreachable ();
5323 }
5324 break;
5325
5326 case RECORD_TYPE:
5327 case UNION_TYPE:
5328 gcc_assert (TREE_CODE (exp) == CONSTRUCTOR);
5329 return output_constructor (exp, size, align, reverse, NULL);
5330
5331 case ERROR_MARK:
5332 return 0;
5333
5334 default:
5335 gcc_unreachable ();
5336 }
5337
5338 if (size > thissize)
5339 assemble_zeros (size - thissize);
5340
5341 return size;
5342 }
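/* Illustrative note (not part of the original source): a STRING_CST whose
   TREE_STRING_LENGTH is 3 (e.g. "hi" plus its terminating NUL) output into
   a field of SIZE 8 takes the ARRAY_TYPE/STRING_CST path above, emits the 3
   string bytes with assemble_string, and then pads with assemble_zeros (5)
   so that exactly SIZE bytes are produced. */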
5343 \f
5344 /* Subroutine of output_constructor, used for computing the size of
5345 arrays of unspecified length. VAL must be a CONSTRUCTOR of an array
5346 type with an unspecified upper bound. */
5347
5348 static unsigned HOST_WIDE_INT
5349 array_size_for_constructor (tree val)
5350 {
5351 tree max_index;
5352 unsigned HOST_WIDE_INT cnt;
5353 tree index, value, tmp;
5354 offset_int i;
5355
5356 /* This code used to attempt to handle string constants that are not
5357 arrays of single-bytes, but nothing else does, so there's no point in
5358 doing it here. */
5359 if (TREE_CODE (val) == STRING_CST)
5360 return TREE_STRING_LENGTH (val);
5361
5362 max_index = NULL_TREE;
5363 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (val), cnt, index, value)
5364 {
5365 if (TREE_CODE (index) == RANGE_EXPR)
5366 index = TREE_OPERAND (index, 1);
5367 if (max_index == NULL_TREE || tree_int_cst_lt (max_index, index))
5368 max_index = index;
5369 }
5370
5371 if (max_index == NULL_TREE)
5372 return 0;
5373
5374 /* Compute the total number of array elements. */
5375 tmp = TYPE_MIN_VALUE (TYPE_DOMAIN (TREE_TYPE (val)));
5376 i = wi::to_offset (max_index) - wi::to_offset (tmp) + 1;
5377
5378 /* Multiply by the array element unit size to find number of bytes. */
5379 i *= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (val))));
5380
5381 gcc_assert (wi::fits_uhwi_p (i));
5382 return i.to_uhwi ();
5383 }
5384
5385 /* Other data structures and helpers for output_constructor. */
5386
5387 /* output_constructor local state to support interaction with helpers. */
5388
5389 struct oc_local_state {
5390
5391 /* Received arguments. */
5392 tree exp; /* Constructor expression. */
5393 tree type; /* Type of constructor expression. */
5394 unsigned HOST_WIDE_INT size; /* # bytes to output - pad if necessary. */
5395 unsigned int align; /* Known initial alignment. */
5396 tree min_index; /* Lower bound if specified for an array. */
5397
5398 /* Output processing state. */
5399 HOST_WIDE_INT total_bytes; /* # bytes output so far / current position. */
5400 int byte; /* Part of a bitfield byte yet to be output. */
5401 int last_relative_index; /* Implicit or explicit index of the last
5402 array element output within a bitfield. */
5403 bool byte_buffer_in_use; /* Whether BYTE is in use. */
5404 bool reverse; /* Whether reverse storage order is in use. */
5405
5406 /* Current element. */
5407 tree field; /* Current field decl in a record. */
5408 tree val; /* Current element value. */
5409 tree index; /* Current element index. */
5410
5411 };
5412
5413 /* Helper for output_constructor. From the current LOCAL state, output a
5414 RANGE_EXPR element. */
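/* Illustrative example (editor's note): with the GNU designated-range
   extension,  static int a[8] = { [2 ... 5] = 7 };  the element carries a
   RANGE_EXPR index with bounds 2 and 5.  Assuming 4-byte ints, the helper
   first pads with zeros up to byte offset 8, then outputs four copies of
   the value 7, leaving total_bytes at 24.  */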
5415
5416 static void
5417 output_constructor_array_range (oc_local_state *local)
5418 {
5419 /* Perform the index calculation in modulo arithmetic but
5420 sign-extend the result because Ada has negative DECL_FIELD_OFFSETs
5421 but we are using an unsigned sizetype. */
5422 unsigned prec = TYPE_PRECISION (sizetype);
5423 offset_int idx = wi::sext (wi::to_offset (TREE_OPERAND (local->index, 0))
5424 - wi::to_offset (local->min_index), prec);
5425 tree valtype = TREE_TYPE (local->val);
5426 HOST_WIDE_INT fieldpos
5427 = (idx * wi::to_offset (TYPE_SIZE_UNIT (valtype))).to_short_addr ();
5428
5429 /* Advance to offset of this element. */
5430 if (fieldpos > local->total_bytes)
5431 {
5432 assemble_zeros (fieldpos - local->total_bytes);
5433 local->total_bytes = fieldpos;
5434 }
5435 else
5436 /* Must not go backwards. */
5437 gcc_assert (fieldpos == local->total_bytes);
5438
5439 unsigned HOST_WIDE_INT fieldsize
5440 = int_size_in_bytes (TREE_TYPE (local->type));
5441
5442 HOST_WIDE_INT lo_index
5443 = tree_to_shwi (TREE_OPERAND (local->index, 0));
5444 HOST_WIDE_INT hi_index
5445 = tree_to_shwi (TREE_OPERAND (local->index, 1));
5446 HOST_WIDE_INT index;
5447
5448 unsigned int align2
5449 = min_align (local->align, fieldsize * BITS_PER_UNIT);
5450
5451 for (index = lo_index; index <= hi_index; index++)
5452 {
5453 /* Output the element's initial value. */
5454 if (local->val == NULL_TREE)
5455 assemble_zeros (fieldsize);
5456 else
5457 fieldsize = output_constant (local->val, fieldsize, align2,
5458 local->reverse, false);
5459
5460 /* Count its size. */
5461 local->total_bytes += fieldsize;
5462 }
5463 }
5464
5465 /* Helper for output_constructor. From the current LOCAL state, output a
5466 field element that is not a true bitfield or part of an outer one. */
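/* Illustrative example (editor's note): for a trailing flexible array
   member,
       struct S { int n; int data[]; };
       static struct S s = { 1, { 2, 3 } };
   DECL_SIZE_UNIT of DATA is not usable, so the field size is derived from
   the initializer (8 bytes here, assuming 4-byte ints), and the code below
   checks that such a field is the last FIELD_DECL of the record.  */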
5467
5468 static void
5469 output_constructor_regular_field (oc_local_state *local)
5470 {
5471 /* Field size and position. Since this structure is static, we know the
5472 positions are constant. */
5473 unsigned HOST_WIDE_INT fieldsize;
5474 HOST_WIDE_INT fieldpos;
5475
5476 unsigned int align2;
5477
5478 /* Output any buffered-up bit-fields preceding this element. */
5479 if (local->byte_buffer_in_use)
5480 {
5481 assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
5482 local->total_bytes++;
5483 local->byte_buffer_in_use = false;
5484 }
5485
5486 if (local->index != NULL_TREE)
5487 {
5488 /* Perform the index calculation in modulo arithmetic but
5489 sign-extend the result because Ada has negative DECL_FIELD_OFFSETs
5490 but we are using an unsigned sizetype. */
5491 unsigned prec = TYPE_PRECISION (sizetype);
5492 offset_int idx = wi::sext (wi::to_offset (local->index)
5493 - wi::to_offset (local->min_index), prec);
5494 fieldpos = (idx * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (local->val))))
5495 .to_short_addr ();
5496 }
5497 else if (local->field != NULL_TREE)
5498 fieldpos = int_byte_position (local->field);
5499 else
5500 fieldpos = 0;
5501
5502 /* Advance to offset of this element.
5503 Note no alignment needed in an array, since that is guaranteed
5504 if each element has the proper size. */
5505 if (local->field != NULL_TREE || local->index != NULL_TREE)
5506 {
5507 if (fieldpos > local->total_bytes)
5508 {
5509 assemble_zeros (fieldpos - local->total_bytes);
5510 local->total_bytes = fieldpos;
5511 }
5512 else
5513 /* Must not go backwards. */
5514 gcc_assert (fieldpos == local->total_bytes);
5515 }
5516
5517 /* Find the alignment of this element. */
5518 align2 = min_align (local->align, BITS_PER_UNIT * fieldpos);
5519
5520 /* Determine size this element should occupy. */
5521 if (local->field)
5522 {
5523 fieldsize = 0;
5524
5525 /* If this is an array with an unspecified upper bound,
5526 the initializer determines the size. */
5527 /* ??? This ought to be checked only if DECL_SIZE_UNIT is NULL,
5528 but we cannot do this until the deprecated support for
5529 initializing zero-length array members is removed. */
5530 if (TREE_CODE (TREE_TYPE (local->field)) == ARRAY_TYPE
5531 && (!TYPE_DOMAIN (TREE_TYPE (local->field))
5532 || !TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (local->field)))))
5533 {
5534 unsigned HOST_WIDE_INT fldsize
5535 = array_size_for_constructor (local->val);
5536 fieldsize = int_size_in_bytes (TREE_TYPE (local->val));
5537 /* In most cases fieldsize == fldsize as the size of the initializer
5538 determines how many elements the flexible array member has. For
5539 C++ fldsize can be smaller though, if the last initializer, the last
5540 few initializers, or all initializers of the flexible array member have
5541 side effects and the FE splits them into dynamic initialization. */
5542 gcc_checking_assert (fieldsize >= fldsize);
5543 /* Given a non-empty initialization, this field had better
5544 be last. Given a flexible array member, the next field
5545 on the chain is a TYPE_DECL of the enclosing struct. */
5546 const_tree next = DECL_CHAIN (local->field);
5547 gcc_assert (!fieldsize || !next || TREE_CODE (next) != FIELD_DECL);
5548 }
5549 else
5550 fieldsize = tree_to_uhwi (DECL_SIZE_UNIT (local->field));
5551 }
5552 else
5553 fieldsize = int_size_in_bytes (TREE_TYPE (local->type));
5554
5555 /* Output the element's initial value. */
5556 if (local->val == NULL_TREE)
5557 assemble_zeros (fieldsize);
5558 else
5559 fieldsize = output_constant (local->val, fieldsize, align2,
5560 local->reverse, false);
5561
5562 /* Count its size. */
5563 local->total_bytes += fieldsize;
5564 }
5565
5566 /* Helper for output_constructor. From the LOCAL state, output an element
5567 that is a true bitfield or part of an outer one. BIT_OFFSET is the offset
5568 from the start of a possibly ongoing outer byte buffer. */
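/* Illustrative example (editor's note): assuming a little-endian target
   with 8-bit bytes,
       struct { unsigned a : 3; unsigned b : 5; } x = { 5, 9 };
   packs both fields into a single buffered byte: 5 occupies bits 0-2 and
   9 is shifted left by 3, giving 5 | (9 << 3) = 0x4d.  On a big-endian
   target (or with reverse storage order) the bits are taken from the most
   significant end instead.  */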
5569
5570 static void
5571 output_constructor_bitfield (oc_local_state *local, unsigned int bit_offset)
5572 {
5573 /* Bit size of this element. */
5574 HOST_WIDE_INT ebitsize
5575 = (local->field
5576 ? tree_to_uhwi (DECL_SIZE (local->field))
5577 : tree_to_uhwi (TYPE_SIZE (TREE_TYPE (local->type))));
5578
5579 /* Relative index of this element if this is an array component. */
5580 HOST_WIDE_INT relative_index
5581 = (!local->field
5582 ? (local->index
5583 ? (tree_to_shwi (local->index)
5584 - tree_to_shwi (local->min_index))
5585 : local->last_relative_index + 1)
5586 : 0);
5587
5588 /* Bit position of this element from the start of the containing
5589 constructor. */
5590 HOST_WIDE_INT constructor_relative_ebitpos
5591 = (local->field
5592 ? int_bit_position (local->field)
5593 : ebitsize * relative_index);
5594
5595 /* Bit position of this element from the start of a possibly ongoing
5596 outer byte buffer. */
5597 HOST_WIDE_INT byte_relative_ebitpos
5598 = bit_offset + constructor_relative_ebitpos;
5599
5600 /* From the start of a possibly ongoing outer byte buffer, offsets to
5601 the first bit of this element and to the first bit past the end of
5602 this element. */
5603 HOST_WIDE_INT next_offset = byte_relative_ebitpos;
5604 HOST_WIDE_INT end_offset = byte_relative_ebitpos + ebitsize;
5605
5606 local->last_relative_index = relative_index;
5607
5608 if (local->val == NULL_TREE)
5609 local->val = integer_zero_node;
5610
5611 while (TREE_CODE (local->val) == VIEW_CONVERT_EXPR
5612 || TREE_CODE (local->val) == NON_LVALUE_EXPR)
5613 local->val = TREE_OPERAND (local->val, 0);
5614
5615 if (TREE_CODE (local->val) != INTEGER_CST
5616 && TREE_CODE (local->val) != CONSTRUCTOR)
5617 {
5618 error ("invalid initial value for member %qE", DECL_NAME (local->field));
5619 return;
5620 }
5621
5622 /* If this field does not start in this (or next) byte, skip some bytes. */
5623 if (next_offset / BITS_PER_UNIT != local->total_bytes)
5624 {
5625 /* Output remnant of any bit field in previous bytes. */
5626 if (local->byte_buffer_in_use)
5627 {
5628 assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
5629 local->total_bytes++;
5630 local->byte_buffer_in_use = false;
5631 }
5632
5633 /* If still not at proper byte, advance to there. */
5634 if (next_offset / BITS_PER_UNIT != local->total_bytes)
5635 {
5636 gcc_assert (next_offset / BITS_PER_UNIT >= local->total_bytes);
5637 assemble_zeros (next_offset / BITS_PER_UNIT - local->total_bytes);
5638 local->total_bytes = next_offset / BITS_PER_UNIT;
5639 }
5640 }
5641
5642 /* Set up the buffer if necessary. */
5643 if (!local->byte_buffer_in_use)
5644 {
5645 local->byte = 0;
5646 if (ebitsize > 0)
5647 local->byte_buffer_in_use = true;
5648 }
5649
5650 /* If this is a nested constructor, recurse passing the bit offset and the
5651 pending data, then retrieve the new pending data afterwards. */
5652 if (TREE_CODE (local->val) == CONSTRUCTOR)
5653 {
5654 oc_outer_state temp_state;
5655 temp_state.bit_offset = next_offset % BITS_PER_UNIT;
5656 temp_state.byte = local->byte;
5657 local->total_bytes
5658 += output_constructor (local->val, 0, 0, local->reverse, &temp_state);
5659 local->byte = temp_state.byte;
5660 return;
5661 }
5662
5663 /* Otherwise, we must split the element into pieces that fall within
5664 separate bytes, and combine each byte with previous or following
5665 bit-fields. */
5666 while (next_offset < end_offset)
5667 {
5668 int this_time;
5669 int shift;
5670 unsigned HOST_WIDE_INT value;
5671 HOST_WIDE_INT next_byte = next_offset / BITS_PER_UNIT;
5672 HOST_WIDE_INT next_bit = next_offset % BITS_PER_UNIT;
5673
5674 /* Advance from byte to byte within this element when necessary. */
5675 while (next_byte != local->total_bytes)
5676 {
5677 assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
5678 local->total_bytes++;
5679 local->byte = 0;
5680 }
5681
5682 /* Number of bits we can process at once (all part of the same byte). */
5683 this_time = MIN (end_offset - next_offset, BITS_PER_UNIT - next_bit);
5684 if (local->reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5685 {
5686 /* For big-endian data, take the most significant bits (of the
5687 bits that are significant) first and put them into bytes from
5688 the most significant end. */
5689 shift = end_offset - next_offset - this_time;
5690
5691 /* Don't try to take a bunch of bits that cross
5692 the word boundary in the INTEGER_CST. We can
5693 only select bits from one element. */
5694 if ((shift / HOST_BITS_PER_WIDE_INT)
5695 != ((shift + this_time - 1) / HOST_BITS_PER_WIDE_INT))
5696 {
5697 const int end = shift + this_time - 1;
5698 shift = end & -HOST_BITS_PER_WIDE_INT;
5699 this_time = end - shift + 1;
5700 }
5701
5702 /* Now get the bits we want to insert. */
5703 value = wi::extract_uhwi (wi::to_widest (local->val),
5704 shift, this_time);
5705
5706 /* Get the result. This works only when:
5707 1 <= this_time <= HOST_BITS_PER_WIDE_INT. */
5708 local->byte |= value << (BITS_PER_UNIT - this_time - next_bit);
5709 }
5710 else
5711 {
5712 /* On little-endian machines, take the least significant bits of
5713 the value first and pack them starting at the least significant
5714 bits of the bytes. */
5715 shift = next_offset - byte_relative_ebitpos;
5716
5717 /* Don't try to take a bunch of bits that cross
5718 the word boundary in the INTEGER_CST. We can
5719 only select bits from one element. */
5720 if ((shift / HOST_BITS_PER_WIDE_INT)
5721 != ((shift + this_time - 1) / HOST_BITS_PER_WIDE_INT))
5722 this_time
5723 = HOST_BITS_PER_WIDE_INT - (shift & (HOST_BITS_PER_WIDE_INT - 1));
5724
5725 /* Now get the bits we want to insert. */
5726 value = wi::extract_uhwi (wi::to_widest (local->val),
5727 shift, this_time);
5728
5729 /* Get the result. This works only when:
5730 1 <= this_time <= HOST_BITS_PER_WIDE_INT. */
5731 local->byte |= value << next_bit;
5732 }
5733
5734 next_offset += this_time;
5735 local->byte_buffer_in_use = true;
5736 }
5737 }
5738
5739 /* Subroutine of output_constant, used for CONSTRUCTORs (aggregate constants).
5740 Generate at least SIZE bytes, padding if necessary. OUTER designates the
5741 caller output state of relevance in recursive invocations. */
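/* Illustrative example (editor's note): for
       static struct { char c; int i; } v = { 'A', 7 };
   the regular-field helper emits the byte 'A', pads with three zero bytes
   to reach the offset of I (assuming 4-byte alignment of int), emits the
   four bytes of 7, and the function returns a total of 8 bytes.  */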
5742
5743 static unsigned HOST_WIDE_INT
5744 output_constructor (tree exp, unsigned HOST_WIDE_INT size, unsigned int align,
5745 bool reverse, oc_outer_state *outer)
5746 {
5747 unsigned HOST_WIDE_INT cnt;
5748 constructor_elt *ce;
5749 oc_local_state local;
5750
5751 /* Setup our local state to communicate with helpers. */
5752 local.exp = exp;
5753 local.type = TREE_TYPE (exp);
5754 local.size = size;
5755 local.align = align;
5756 if (TREE_CODE (local.type) == ARRAY_TYPE && TYPE_DOMAIN (local.type))
5757 local.min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (local.type));
5758 else
5759 local.min_index = integer_zero_node;
5760
5761 local.total_bytes = 0;
5762 local.byte_buffer_in_use = outer != NULL;
5763 local.byte = outer ? outer->byte : 0;
5764 local.last_relative_index = -1;
5765 /* The storage order is specified for every aggregate type. */
5766 if (AGGREGATE_TYPE_P (local.type))
5767 local.reverse = TYPE_REVERSE_STORAGE_ORDER (local.type);
5768 else
5769 local.reverse = reverse;
5770
5771 gcc_assert (HOST_BITS_PER_WIDE_INT >= BITS_PER_UNIT);
5772
5773 /* As CE goes through the elements of the constant, FIELD goes through the
5774 structure fields if the constant is a structure. If the constant is a
5775 union, we override this by getting the field from the TREE_LIST element.
5776 But the constant could also be an array. Then FIELD is zero.
5777
5778 There is always a maximum of one element in the chain LINK for unions
5779 (even if the initializer in a source program incorrectly contains
5780 more than one). */
5781
5782 if (TREE_CODE (local.type) == RECORD_TYPE)
5783 local.field = TYPE_FIELDS (local.type);
5784 else
5785 local.field = NULL_TREE;
5786
5787 for (cnt = 0;
5788 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), cnt, &ce);
5789 cnt++, local.field = local.field ? DECL_CHAIN (local.field) : 0)
5790 {
5791 local.val = ce->value;
5792 local.index = NULL_TREE;
5793
5794 /* The element in a union constructor specifies the proper field
5795 or index. */
5796 if (RECORD_OR_UNION_TYPE_P (local.type) && ce->index != NULL_TREE)
5797 local.field = ce->index;
5798
5799 else if (TREE_CODE (local.type) == ARRAY_TYPE)
5800 local.index = ce->index;
5801
5802 if (local.field && flag_verbose_asm)
5803 fprintf (asm_out_file, "%s %s:\n",
5804 ASM_COMMENT_START,
5805 DECL_NAME (local.field)
5806 ? IDENTIFIER_POINTER (DECL_NAME (local.field))
5807 : "<anonymous>");
5808
5809 /* Eliminate the marker that makes a cast not be an lvalue. */
5810 if (local.val != NULL_TREE)
5811 STRIP_NOPS (local.val);
5812
5813 /* Output the current element, using the appropriate helper ... */
5814
5815 /* For an array slice not part of an outer bitfield. */
5816 if (!outer
5817 && local.index != NULL_TREE
5818 && TREE_CODE (local.index) == RANGE_EXPR)
5819 output_constructor_array_range (&local);
5820
5821 /* For a field that is neither a true bitfield nor part of an outer one,
5822 known to be at least byte aligned and multiple-of-bytes long. */
5823 else if (!outer
5824 && (local.field == NULL_TREE
5825 || !CONSTRUCTOR_BITFIELD_P (local.field)))
5826 output_constructor_regular_field (&local);
5827
5828 /* For a true bitfield or part of an outer one. Only INTEGER_CSTs are
5829 supported for scalar fields, so we may need to convert first. */
5830 else
5831 {
5832 if (TREE_CODE (local.val) == REAL_CST)
5833 local.val
5834 = fold_unary (VIEW_CONVERT_EXPR,
5835 build_nonstandard_integer_type
5836 (TYPE_PRECISION (TREE_TYPE (local.val)), 0),
5837 local.val);
5838 output_constructor_bitfield (&local, outer ? outer->bit_offset : 0);
5839 }
5840 }
5841
5842 /* If we are not at toplevel, save the pending data for our caller.
5843 Otherwise output the pending data and padding zeros as needed. */
5844 if (outer)
5845 outer->byte = local.byte;
5846 else
5847 {
5848 if (local.byte_buffer_in_use)
5849 {
5850 assemble_integer (GEN_INT (local.byte), 1, BITS_PER_UNIT, 1);
5851 local.total_bytes++;
5852 }
5853
5854 if ((unsigned HOST_WIDE_INT)local.total_bytes < local.size)
5855 {
5856 assemble_zeros (local.size - local.total_bytes);
5857 local.total_bytes = local.size;
5858 }
5859 }
5860
5861 return local.total_bytes;
5862 }
5863
5864 /* Mark DECL as weak. */
5865
5866 static void
5867 mark_weak (tree decl)
5868 {
5869 if (DECL_WEAK (decl))
5870 return;
5871
5872 struct symtab_node *n = symtab_node::get (decl);
5873 if (n && n->refuse_visibility_changes)
5874 error ("%+qD declared weak after being used", decl);
5875 DECL_WEAK (decl) = 1;
5876
5877 if (DECL_RTL_SET_P (decl)
5878 && MEM_P (DECL_RTL (decl))
5879 && XEXP (DECL_RTL (decl), 0)
5880 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == SYMBOL_REF)
5881 SYMBOL_REF_WEAK (XEXP (DECL_RTL (decl), 0)) = 1;
5882 }
5883
5884 /* Merge weak status between NEWDECL and OLDDECL. */
5885
5886 void
5887 merge_weak (tree newdecl, tree olddecl)
5888 {
5889 if (DECL_WEAK (newdecl) == DECL_WEAK (olddecl))
5890 {
5891 if (DECL_WEAK (newdecl) && TARGET_SUPPORTS_WEAK)
5892 {
5893 tree *pwd;
5894 /* We put the NEWDECL on the weak_decls list at some point
5895 and OLDDECL as well. Keep just OLDDECL on the list. */
5896 for (pwd = &weak_decls; *pwd; pwd = &TREE_CHAIN (*pwd))
5897 if (TREE_VALUE (*pwd) == newdecl)
5898 {
5899 *pwd = TREE_CHAIN (*pwd);
5900 break;
5901 }
5902 }
5903 return;
5904 }
5905
5906 if (DECL_WEAK (newdecl))
5907 {
5908 tree wd;
5909
5910 /* NEWDECL is weak, but OLDDECL is not. */
5911
5912 /* If we already output the OLDDECL, we're in trouble; we can't
5913 go back and make it weak. This should never happen in
5914 unit-at-a-time compilation. */
5915 gcc_assert (!TREE_ASM_WRITTEN (olddecl));
5916
5917 /* If we've already generated rtl referencing OLDDECL, we may
5918 have done so in a way that will not function properly with
5919 a weak symbol. Again in unit-at-a-time this should be
5920 impossible. */
5921 gcc_assert (!TREE_USED (olddecl)
5922 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (olddecl)));
5923
5924 /* PR 49899: You cannot convert a static function into a weak, public function. */
5925 if (! TREE_PUBLIC (olddecl) && TREE_PUBLIC (newdecl))
5926 error ("weak declaration of %q+D being applied to a already "
5927 "existing, static definition", newdecl);
5928
5929 if (TARGET_SUPPORTS_WEAK)
5930 {
5931 /* We put the NEWDECL on the weak_decls list at some point.
5932 Replace it with the OLDDECL. */
5933 for (wd = weak_decls; wd; wd = TREE_CHAIN (wd))
5934 if (TREE_VALUE (wd) == newdecl)
5935 {
5936 TREE_VALUE (wd) = olddecl;
5937 break;
5938 }
5939 /* We may not find the entry on the list. If NEWDECL is a
5940 weak alias, then we will have already called
5941 globalize_decl to remove the entry; in that case, we do
5942 not need to do anything. */
5943 }
5944
5945 /* Make the OLDDECL weak; it's OLDDECL that we'll be keeping. */
5946 mark_weak (olddecl);
5947 }
5948 else
5949 /* OLDDECL was weak, but NEWDECL was not explicitly marked as
5950 weak. Just update NEWDECL to indicate that it's weak too. */
5951 mark_weak (newdecl);
5952 }
5953
5954 /* Declare DECL to be a weak symbol. */
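/* Editor's note: a declaration such as
       extern void foo (void) __attribute__ ((weak));
   reaches this function via the "weak" attribute handler; if FOO is
   actually used, weak_finish later emits something like ".weak foo" on a
   typical ELF/GAS configuration (via ASM_WEAKEN_DECL or ASM_WEAKEN_LABEL).  */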
5955
5956 void
5957 declare_weak (tree decl)
5958 {
5959 /* With -fsyntax-only, TREE_ASM_WRITTEN might be set on certain function
5960 decls earlier than normal, but since with -fsyntax-only nothing is really
5961 emitted, there is no harm in marking it weak later. */
5962 gcc_assert (TREE_CODE (decl) != FUNCTION_DECL
5963 || !TREE_ASM_WRITTEN (decl)
5964 || flag_syntax_only);
5965 if (! TREE_PUBLIC (decl))
5966 {
5967 error ("weak declaration of %q+D must be public", decl);
5968 return;
5969 }
5970 else if (!TARGET_SUPPORTS_WEAK)
5971 warning (0, "weak declaration of %q+D not supported", decl);
5972
5973 mark_weak (decl);
5974 if (!lookup_attribute ("weak", DECL_ATTRIBUTES (decl)))
5975 DECL_ATTRIBUTES (decl)
5976 = tree_cons (get_identifier ("weak"), NULL, DECL_ATTRIBUTES (decl));
5977 }
5978
5979 static void
5980 weak_finish_1 (tree decl)
5981 {
5982 #if defined (ASM_WEAKEN_DECL) || defined (ASM_WEAKEN_LABEL)
5983 const char *const name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
5984 #endif
5985
5986 if (! TREE_USED (decl))
5987 return;
5988
5989 #ifdef ASM_WEAKEN_DECL
5990 ASM_WEAKEN_DECL (asm_out_file, decl, name, NULL);
5991 #else
5992 #ifdef ASM_WEAKEN_LABEL
5993 ASM_WEAKEN_LABEL (asm_out_file, name);
5994 #else
5995 #ifdef ASM_OUTPUT_WEAK_ALIAS
5996 {
5997 static bool warn_once = 0;
5998 if (! warn_once)
5999 {
6000 warning (0, "only weak aliases are supported in this configuration");
6001 warn_once = 1;
6002 }
6003 return;
6004 }
6005 #endif
6006 #endif
6007 #endif
6008 }
6009
6010 /* Given an assembly name, find the decl it is associated with. */
6011 static tree
6012 find_decl (tree target)
6013 {
6014 symtab_node *node = symtab_node::get_for_asmname (target);
6015 if (node)
6016 return node->decl;
6017 return NULL_TREE;
6018 }
6019
6020 /* This TREE_LIST contains weakref targets. */
6021
6022 static GTY(()) tree weakref_targets;
6023
6024 /* Emit any pending weak declarations. */
6025
6026 void
6027 weak_finish (void)
6028 {
6029 tree t;
6030
6031 for (t = weakref_targets; t; t = TREE_CHAIN (t))
6032 {
6033 tree alias_decl = TREE_PURPOSE (t);
6034 tree target = ultimate_transparent_alias_target (&TREE_VALUE (t));
6035
6036 if (! TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (alias_decl))
6037 || TREE_SYMBOL_REFERENCED (target))
6038 /* Remove alias_decl from the weak list, but leave entries for
6039 the target alone. */
6040 target = NULL_TREE;
6041 #ifndef ASM_OUTPUT_WEAKREF
6042 else if (! TREE_SYMBOL_REFERENCED (target))
6043 {
6044 /* Use ASM_WEAKEN_LABEL only if ASM_WEAKEN_DECL is not
6045 defined, otherwise we and weak_finish_1 would use
6046 different macros. */
6047 # if defined ASM_WEAKEN_LABEL && ! defined ASM_WEAKEN_DECL
6048 ASM_WEAKEN_LABEL (asm_out_file, IDENTIFIER_POINTER (target));
6049 # else
6050 tree decl = find_decl (target);
6051
6052 if (! decl)
6053 {
6054 decl = build_decl (DECL_SOURCE_LOCATION (alias_decl),
6055 TREE_CODE (alias_decl), target,
6056 TREE_TYPE (alias_decl));
6057
6058 DECL_EXTERNAL (decl) = 1;
6059 TREE_PUBLIC (decl) = 1;
6060 DECL_ARTIFICIAL (decl) = 1;
6061 TREE_NOTHROW (decl) = TREE_NOTHROW (alias_decl);
6062 TREE_USED (decl) = 1;
6063 }
6064
6065 weak_finish_1 (decl);
6066 # endif
6067 }
6068 #endif
6069
6070 {
6071 tree *p;
6072 tree t2;
6073
6074 /* Remove the alias and the target from the pending weak list
6075 so that we do not emit any .weak directives for the former,
6076 nor multiple .weak directives for the latter. */
6077 for (p = &weak_decls; (t2 = *p) ; )
6078 {
6079 if (TREE_VALUE (t2) == alias_decl
6080 || target == DECL_ASSEMBLER_NAME (TREE_VALUE (t2)))
6081 *p = TREE_CHAIN (t2);
6082 else
6083 p = &TREE_CHAIN (t2);
6084 }
6085
6086 /* Remove other weakrefs to the same target, to speed things up. */
6087 for (p = &TREE_CHAIN (t); (t2 = *p) ; )
6088 {
6089 if (target == ultimate_transparent_alias_target (&TREE_VALUE (t2)))
6090 *p = TREE_CHAIN (t2);
6091 else
6092 p = &TREE_CHAIN (t2);
6093 }
6094 }
6095 }
6096
6097 for (t = weak_decls; t; t = TREE_CHAIN (t))
6098 {
6099 tree decl = TREE_VALUE (t);
6100
6101 weak_finish_1 (decl);
6102 }
6103 }
6104
6105 /* Emit the assembly bits to indicate that DECL is globally visible. */
6106
6107 static void
6108 globalize_decl (tree decl)
6109 {
6110
6111 #if defined (ASM_WEAKEN_LABEL) || defined (ASM_WEAKEN_DECL)
6112 if (DECL_WEAK (decl))
6113 {
6114 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
6115 tree *p, t;
6116
6117 #ifdef ASM_WEAKEN_DECL
6118 ASM_WEAKEN_DECL (asm_out_file, decl, name, 0);
6119 #else
6120 ASM_WEAKEN_LABEL (asm_out_file, name);
6121 #endif
6122
6123 /* Remove this function from the pending weak list so that
6124 we do not emit multiple .weak directives for it. */
6125 for (p = &weak_decls; (t = *p) ; )
6126 {
6127 if (DECL_ASSEMBLER_NAME (decl) == DECL_ASSEMBLER_NAME (TREE_VALUE (t)))
6128 *p = TREE_CHAIN (t);
6129 else
6130 p = &TREE_CHAIN (t);
6131 }
6132
6133 /* Remove weakrefs to the same target from the pending weakref
6134 list, for the same reason. */
6135 for (p = &weakref_targets; (t = *p) ; )
6136 {
6137 if (DECL_ASSEMBLER_NAME (decl)
6138 == ultimate_transparent_alias_target (&TREE_VALUE (t)))
6139 *p = TREE_CHAIN (t);
6140 else
6141 p = &TREE_CHAIN (t);
6142 }
6143
6144 return;
6145 }
6146 #endif
6147
6148 targetm.asm_out.globalize_decl_name (asm_out_file, decl);
6149 }
6150
6151 vec<alias_pair, va_gc> *alias_pairs;
6152
6153 /* Output the assembler code for a define (equate) using ASM_OUTPUT_DEF
6154 or ASM_OUTPUT_DEF_FROM_DECLS. The function defines the symbol whose
6155 tree node is DECL to have the value of the tree node TARGET. */
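/* Editor's note: for an alias such as
       void f (void) { }
       void g (void) __attribute__ ((alias ("f")));
   an ELF target typically emits ".globl g" (via globalize_decl, since G is
   public) followed by ".set g, f" or an equivalent directive produced by
   ASM_OUTPUT_DEF / ASM_OUTPUT_DEF_FROM_DECLS.  */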
6156
6157 void
6158 do_assemble_alias (tree decl, tree target)
6159 {
6160 tree id;
6161
6162 /* Emulated TLS had better not get this var. */
6163 gcc_assert (!(!targetm.have_tls
6164 && VAR_P (decl)
6165 && DECL_THREAD_LOCAL_P (decl)));
6166
6167 if (TREE_ASM_WRITTEN (decl))
6168 return;
6169
6170 id = DECL_ASSEMBLER_NAME (decl);
6171 ultimate_transparent_alias_target (&id);
6172 ultimate_transparent_alias_target (&target);
6173
6174 /* We must force creation of DECL_RTL for debug info generation, even though
6175 we don't use it here. */
6176 make_decl_rtl (decl);
6177
6178 TREE_ASM_WRITTEN (decl) = 1;
6179 TREE_ASM_WRITTEN (DECL_ASSEMBLER_NAME (decl)) = 1;
6180 TREE_ASM_WRITTEN (id) = 1;
6181
6182 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
6183 {
6184 if (!TREE_SYMBOL_REFERENCED (target))
6185 weakref_targets = tree_cons (decl, target, weakref_targets);
6186
6187 #ifdef ASM_OUTPUT_WEAKREF
6188 ASM_OUTPUT_WEAKREF (asm_out_file, decl,
6189 IDENTIFIER_POINTER (id),
6190 IDENTIFIER_POINTER (target));
6191 #else
6192 if (!TARGET_SUPPORTS_WEAK)
6193 {
6194 error_at (DECL_SOURCE_LOCATION (decl),
6195 "weakref is not supported in this configuration");
6196 return;
6197 }
6198 #endif
6199 return;
6200 }
6201
6202 #ifdef ASM_OUTPUT_DEF
6203 tree orig_decl = decl;
6204
6205 /* Make name accessible from other files, if appropriate. */
6206
6207 if (TREE_PUBLIC (decl) || TREE_PUBLIC (orig_decl))
6208 {
6209 globalize_decl (decl);
6210 maybe_assemble_visibility (decl);
6211 }
6212 if (TREE_CODE (decl) == FUNCTION_DECL
6213 && cgraph_node::get (decl)->ifunc_resolver)
6214 {
6215 #if defined (ASM_OUTPUT_TYPE_DIRECTIVE)
6216 if (targetm.has_ifunc_p ())
6217 ASM_OUTPUT_TYPE_DIRECTIVE
6218 (asm_out_file, IDENTIFIER_POINTER (id),
6219 IFUNC_ASM_TYPE);
6220 else
6221 #endif
6222 error_at (DECL_SOURCE_LOCATION (decl),
6223 "%qs is not supported on this target", "ifunc");
6224 }
6225
6226 # ifdef ASM_OUTPUT_DEF_FROM_DECLS
6227 ASM_OUTPUT_DEF_FROM_DECLS (asm_out_file, decl, target);
6228 # else
6229 ASM_OUTPUT_DEF (asm_out_file,
6230 IDENTIFIER_POINTER (id),
6231 IDENTIFIER_POINTER (target));
6232 # endif
6233 #elif defined (ASM_OUTPUT_WEAK_ALIAS) || defined (ASM_WEAKEN_DECL)
6234 {
6235 const char *name;
6236 tree *p, t;
6237
6238 name = IDENTIFIER_POINTER (id);
6239 # ifdef ASM_WEAKEN_DECL
6240 ASM_WEAKEN_DECL (asm_out_file, decl, name, IDENTIFIER_POINTER (target));
6241 # else
6242 ASM_OUTPUT_WEAK_ALIAS (asm_out_file, name, IDENTIFIER_POINTER (target));
6243 # endif
6244 /* Remove this function from the pending weak list so that
6245 we do not emit multiple .weak directives for it. */
6246 for (p = &weak_decls; (t = *p) ; )
6247 if (DECL_ASSEMBLER_NAME (decl) == DECL_ASSEMBLER_NAME (TREE_VALUE (t))
6248 || id == DECL_ASSEMBLER_NAME (TREE_VALUE (t)))
6249 *p = TREE_CHAIN (t);
6250 else
6251 p = &TREE_CHAIN (t);
6252
6253 /* Remove weakrefs to the same target from the pending weakref
6254 list, for the same reason. */
6255 for (p = &weakref_targets; (t = *p) ; )
6256 {
6257 if (id == ultimate_transparent_alias_target (&TREE_VALUE (t)))
6258 *p = TREE_CHAIN (t);
6259 else
6260 p = &TREE_CHAIN (t);
6261 }
6262 }
6263 #endif
6264 }
6265
6266 /* Output .symver directive. */
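/* Editor's note: for
       __attribute__ ((symver ("foo@VERS_1"))) void foo_v1 (void) { }
   an ELF target typically emits ".symver foo_v1, foo@VERS_1" through
   ASM_OUTPUT_SYMVER_DIRECTIVE.  */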
6267
6268 void
6269 do_assemble_symver (tree decl, tree target)
6270 {
6271 tree id = DECL_ASSEMBLER_NAME (decl);
6272 ultimate_transparent_alias_target (&id);
6273 ultimate_transparent_alias_target (&target);
6274 #ifdef ASM_OUTPUT_SYMVER_DIRECTIVE
6275 ASM_OUTPUT_SYMVER_DIRECTIVE (asm_out_file,
6276 IDENTIFIER_POINTER (target),
6277 IDENTIFIER_POINTER (id));
6278 #else
6279 error ("symver is only supported on ELF platforms");
6280 #endif
6281 }
6282
6283 /* Emit an assembler directive to make the symbol for DECL an alias to
6284 the symbol for TARGET. */
6285
6286 void
6287 assemble_alias (tree decl, tree target)
6288 {
6289 tree target_decl;
6290
6291 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
6292 {
6293 tree alias = DECL_ASSEMBLER_NAME (decl);
6294
6295 ultimate_transparent_alias_target (&target);
6296
6297 if (alias == target)
6298 error ("%qs symbol %q+D ultimately targets itself", "weakref", decl);
6299 if (TREE_PUBLIC (decl))
6300 error ("%qs symbol %q+D must have static linkage", "weakref", decl);
6301 }
6302 else
6303 {
6304 #if !defined (ASM_OUTPUT_DEF)
6305 # if !defined(ASM_OUTPUT_WEAK_ALIAS) && !defined (ASM_WEAKEN_DECL)
6306 error_at (DECL_SOURCE_LOCATION (decl),
6307 "alias definitions not supported in this configuration");
6308 TREE_ASM_WRITTEN (decl) = 1;
6309 return;
6310 # else
6311 if (!DECL_WEAK (decl))
6312 {
6313 /* NB: ifunc_resolver isn't set when an error is detected. */
6314 if (TREE_CODE (decl) == FUNCTION_DECL
6315 && lookup_attribute ("ifunc", DECL_ATTRIBUTES (decl)))
6316 error_at (DECL_SOURCE_LOCATION (decl),
6317 "%qs is not supported in this configuration", "ifunc");
6318 else
6319 error_at (DECL_SOURCE_LOCATION (decl),
6320 "only weak aliases are supported in this configuration");
6321 TREE_ASM_WRITTEN (decl) = 1;
6322 return;
6323 }
6324 # endif
6325 #endif
6326 }
6327 TREE_USED (decl) = 1;
6328
6329 /* Allow aliases to aliases. */
6330 if (TREE_CODE (decl) == FUNCTION_DECL)
6331 cgraph_node::get_create (decl)->alias = true;
6332 else
6333 varpool_node::get_create (decl)->alias = true;
6334
6335 /* If the target has already been emitted, we don't have to queue the
6336 alias. This saves a tad of memory. */
6337 if (symtab->global_info_ready)
6338 target_decl = find_decl (target);
6339 else
6340 target_decl = NULL;
6341 if ((target_decl && TREE_ASM_WRITTEN (target_decl))
6342 || symtab->state >= EXPANSION)
6343 do_assemble_alias (decl, target);
6344 else
6345 {
6346 alias_pair p = {decl, target};
6347 vec_safe_push (alias_pairs, p);
6348 }
6349 }
6350
6351 /* Record and output a table of translations from original function
6352 to its transaction aware clone. Note that tm_pure functions are
6353 considered to be their own clone. */
6354
6355 struct tm_clone_hasher : ggc_cache_ptr_hash<tree_map>
6356 {
6357 static hashval_t hash (tree_map *m) { return tree_map_hash (m); }
6358 static bool equal (tree_map *a, tree_map *b) { return tree_map_eq (a, b); }
6359
6360 static int
6361 keep_cache_entry (tree_map *&e)
6362 {
6363 return ggc_marked_p (e->base.from);
6364 }
6365 };
6366
6367 static GTY((cache)) hash_table<tm_clone_hasher> *tm_clone_hash;
6368
6369 void
6370 record_tm_clone_pair (tree o, tree n)
6371 {
6372 struct tree_map **slot, *h;
6373
6374 if (tm_clone_hash == NULL)
6375 tm_clone_hash = hash_table<tm_clone_hasher>::create_ggc (32);
6376
6377 h = ggc_alloc<tree_map> ();
6378 h->hash = htab_hash_pointer (o);
6379 h->base.from = o;
6380 h->to = n;
6381
6382 slot = tm_clone_hash->find_slot_with_hash (h, h->hash, INSERT);
6383 *slot = h;
6384 }
6385
6386 tree
6387 get_tm_clone_pair (tree o)
6388 {
6389 if (tm_clone_hash)
6390 {
6391 struct tree_map *h, in;
6392
6393 in.base.from = o;
6394 in.hash = htab_hash_pointer (o);
6395 h = tm_clone_hash->find_with_hash (&in, in.hash);
6396 if (h)
6397 return h->to;
6398 }
6399 return NULL_TREE;
6400 }
6401
6402 struct tm_alias_pair
6403 {
6404 unsigned int uid;
6405 tree from;
6406 tree to;
6407 };
6408
6409
6410 /* Dump the actual pairs to the .tm_clone_table section. */
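/* Editor's note: each surviving pair is emitted as two pointer-sized
   values, the address of the original function followed by the address of
   its transactional clone, so that the TM runtime can map one onto the
   other at run time.  */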
6411
6412 static void
6413 dump_tm_clone_pairs (vec<tm_alias_pair> tm_alias_pairs)
6414 {
6415 unsigned i;
6416 tm_alias_pair *p;
6417 bool switched = false;
6418
6419 FOR_EACH_VEC_ELT (tm_alias_pairs, i, p)
6420 {
6421 tree src = p->from;
6422 tree dst = p->to;
6423 struct cgraph_node *src_n = cgraph_node::get (src);
6424 struct cgraph_node *dst_n = cgraph_node::get (dst);
6425
6426 /* The function ipa_tm_create_version() marks the clone as needed if
6427 the original function was needed. But we also mark the clone as
6428 needed if we ever called the clone indirectly through
6429 TM_GETTMCLONE. If neither of these is true, we didn't generate
6430 a clone, and we didn't call it indirectly... no sense keeping it
6431 in the clone table. */
6432 if (!dst_n || !dst_n->definition)
6433 continue;
6434
6435 /* This covers the case where we have optimized the original
6436 function away, and only access the transactional clone. */
6437 if (!src_n || !src_n->definition)
6438 continue;
6439
6440 if (!switched)
6441 {
6442 switch_to_section (targetm.asm_out.tm_clone_table_section ());
6443 assemble_align (POINTER_SIZE);
6444 switched = true;
6445 }
6446
6447 assemble_integer (XEXP (DECL_RTL (src), 0),
6448 POINTER_SIZE_UNITS, POINTER_SIZE, 1);
6449 assemble_integer (XEXP (DECL_RTL (dst), 0),
6450 POINTER_SIZE_UNITS, POINTER_SIZE, 1);
6451 }
6452 }
6453
6454 /* Provide a default for the tm_clone_table section. */
6455
6456 section *
6457 default_clone_table_section (void)
6458 {
6459 return get_named_section (NULL, ".tm_clone_table", 3);
6460 }
6461
6462 /* Helper comparison function for qsorting tm_alias_pair entries by the
6463 DECL_UID stored in their UID field. */
6464
6465 static int
6466 tm_alias_pair_cmp (const void *x, const void *y)
6467 {
6468 const tm_alias_pair *p1 = (const tm_alias_pair *) x;
6469 const tm_alias_pair *p2 = (const tm_alias_pair *) y;
6470 if (p1->uid < p2->uid)
6471 return -1;
6472 if (p1->uid > p2->uid)
6473 return 1;
6474 return 0;
6475 }
6476
6477 void
6478 finish_tm_clone_pairs (void)
6479 {
6480 vec<tm_alias_pair> tm_alias_pairs = vNULL;
6481
6482 if (tm_clone_hash == NULL)
6483 return;
6484
6485 /* We need a deterministic order for the .tm_clone_table, otherwise
6486 we will get bootstrap comparison failures, so dump the hash table
6487 to a vector, sort it, and dump the vector. */
6488
6489 /* Dump the hashtable to a vector. */
6490 tree_map *map;
6491 hash_table<tm_clone_hasher>::iterator iter;
6492 FOR_EACH_HASH_TABLE_ELEMENT (*tm_clone_hash, map, tree_map *, iter)
6493 {
6494 tm_alias_pair p = {DECL_UID (map->base.from), map->base.from, map->to};
6495 tm_alias_pairs.safe_push (p);
6496 }
6497 /* Sort it. */
6498 tm_alias_pairs.qsort (tm_alias_pair_cmp);
6499
6500 /* Dump it. */
6501 dump_tm_clone_pairs (tm_alias_pairs);
6502
6503 tm_clone_hash->empty ();
6504 tm_clone_hash = NULL;
6505 tm_alias_pairs.release ();
6506 }
6507
6508
6509 /* Emit an assembler directive to set the visibility of the symbol for
6510 DECL to the visibility type VIS, which must not be VISIBILITY_DEFAULT. */
6511
6512 void
6513 default_assemble_visibility (tree decl ATTRIBUTE_UNUSED,
6514 int vis ATTRIBUTE_UNUSED)
6515 {
6516 #ifdef HAVE_GAS_HIDDEN
6517 static const char * const visibility_types[] = {
6518 NULL, "protected", "hidden", "internal"
6519 };
6520
6521 const char *name, *type;
6522 tree id;
6523
6524 id = DECL_ASSEMBLER_NAME (decl);
6525 ultimate_transparent_alias_target (&id);
6526 name = IDENTIFIER_POINTER (id);
6527
6528 type = visibility_types[vis];
6529
6530 fprintf (asm_out_file, "\t.%s\t", type);
6531 assemble_name (asm_out_file, name);
6532 fprintf (asm_out_file, "\n");
6533 #else
6534 if (!DECL_ARTIFICIAL (decl))
6535 warning (OPT_Wattributes, "visibility attribute not supported "
6536 "in this configuration; ignored");
6537 #endif
6538 }
6539
6540 /* A helper function to call assemble_visibility when needed for a decl. */
6541
6542 int
6543 maybe_assemble_visibility (tree decl)
6544 {
6545 enum symbol_visibility vis = DECL_VISIBILITY (decl);
6546 if (vis != VISIBILITY_DEFAULT)
6547 {
6548 targetm.asm_out.assemble_visibility (decl, vis);
6549 return 1;
6550 }
6551 else
6552 return 0;
6553 }
6554
6555 /* Returns 1 if the target configuration supports defining public symbols
6556 so that one of them will be chosen at link time instead of generating a
6557 multiply-defined symbol error, whether through the use of weak symbols or
6558 a target-specific mechanism for having duplicates discarded. */
6559
6560 int
6561 supports_one_only (void)
6562 {
6563 if (SUPPORTS_ONE_ONLY)
6564 return 1;
6565 return TARGET_SUPPORTS_WEAK;
6566 }
6567
6568 /* Set up DECL as a public symbol that can be defined in multiple
6569 translation units without generating a linker error. */
6570
6571 void
6572 make_decl_one_only (tree decl, tree comdat_group)
6573 {
6574 struct symtab_node *symbol;
6575 gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));
6576
6577 TREE_PUBLIC (decl) = 1;
6578
6579 if (VAR_P (decl))
6580 symbol = varpool_node::get_create (decl);
6581 else
6582 symbol = cgraph_node::get_create (decl);
6583
6584 if (SUPPORTS_ONE_ONLY)
6585 {
6586 #ifdef MAKE_DECL_ONE_ONLY
6587 MAKE_DECL_ONE_ONLY (decl);
6588 #endif
6589 symbol->set_comdat_group (comdat_group);
6590 }
6591 else if (VAR_P (decl)
6592 && (DECL_INITIAL (decl) == 0
6593 || (!in_lto_p && DECL_INITIAL (decl) == error_mark_node)))
6594 DECL_COMMON (decl) = 1;
6595 else
6596 {
6597 gcc_assert (TARGET_SUPPORTS_WEAK);
6598 DECL_WEAK (decl) = 1;
6599 }
6600 }
6601
6602 void
6603 init_varasm_once (void)
6604 {
6605 section_htab = hash_table<section_hasher>::create_ggc (31);
6606 object_block_htab = hash_table<object_block_hasher>::create_ggc (31);
6607 const_desc_htab = hash_table<tree_descriptor_hasher>::create_ggc (1009);
6608
6609 shared_constant_pool = create_constant_pool ();
6610
6611 #ifdef TEXT_SECTION_ASM_OP
6612 text_section = get_unnamed_section (SECTION_CODE, output_section_asm_op,
6613 TEXT_SECTION_ASM_OP);
6614 #endif
6615
6616 #ifdef DATA_SECTION_ASM_OP
6617 data_section = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
6618 DATA_SECTION_ASM_OP);
6619 #endif
6620
6621 #ifdef SDATA_SECTION_ASM_OP
6622 sdata_section = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
6623 SDATA_SECTION_ASM_OP);
6624 #endif
6625
6626 #ifdef READONLY_DATA_SECTION_ASM_OP
6627 readonly_data_section = get_unnamed_section (0, output_section_asm_op,
6628 READONLY_DATA_SECTION_ASM_OP);
6629 #endif
6630
6631 #ifdef CTORS_SECTION_ASM_OP
6632 ctors_section = get_unnamed_section (0, output_section_asm_op,
6633 CTORS_SECTION_ASM_OP);
6634 #endif
6635
6636 #ifdef DTORS_SECTION_ASM_OP
6637 dtors_section = get_unnamed_section (0, output_section_asm_op,
6638 DTORS_SECTION_ASM_OP);
6639 #endif
6640
6641 #ifdef BSS_SECTION_ASM_OP
6642 bss_section = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
6643 output_section_asm_op,
6644 BSS_SECTION_ASM_OP);
6645 #endif
6646
6647 #ifdef SBSS_SECTION_ASM_OP
6648 sbss_section = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
6649 output_section_asm_op,
6650 SBSS_SECTION_ASM_OP);
6651 #endif
6652
6653 tls_comm_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS
6654 | SECTION_COMMON, emit_tls_common);
6655 lcomm_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS
6656 | SECTION_COMMON, emit_local);
6657 comm_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS
6658 | SECTION_COMMON, emit_common);
6659
6660 #if defined ASM_OUTPUT_ALIGNED_BSS
6661 bss_noswitch_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS,
6662 emit_bss);
6663 #endif
6664
6665 targetm.asm_out.init_sections ();
6666
6667 if (readonly_data_section == NULL)
6668 readonly_data_section = text_section;
6669
6670 #ifdef ASM_OUTPUT_EXTERNAL
6671 pending_assemble_externals_set = new hash_set<tree>;
6672 #endif
6673 }
6674
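/* Return the TLS access model to use for DECL by default: an "exec" model
   when not compiling for a shared library and a "dynamic" model otherwise,
   refined by whether DECL is known to bind locally; the model requested
   with -ftls-model acts as a lower bound (editor's summary of the logic
   below).  */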
6675 enum tls_model
6676 decl_default_tls_model (const_tree decl)
6677 {
6678 enum tls_model kind;
6679 bool is_local;
6680
6681 is_local = targetm.binds_local_p (decl);
6682 if (!flag_shlib)
6683 {
6684 if (is_local)
6685 kind = TLS_MODEL_LOCAL_EXEC;
6686 else
6687 kind = TLS_MODEL_INITIAL_EXEC;
6688 }
6689
6690 /* Local dynamic is inefficient when we're not combining the
6691 parts of the address. */
6692 else if (optimize && is_local)
6693 kind = TLS_MODEL_LOCAL_DYNAMIC;
6694 else
6695 kind = TLS_MODEL_GLOBAL_DYNAMIC;
6696 if (kind < flag_tls_default)
6697 kind = flag_tls_default;
6698
6699 return kind;
6700 }
6701
6702 /* Select a set of attributes for section NAME based on the properties
6703 of DECL and whether or not RELOC indicates that DECL's initializer
6704 might contain runtime relocations.
6705
6706 We make the section read-only and executable for a function decl,
6707 read-only for a const data decl, and writable for a non-const data decl. */
6708
6709 unsigned int
6710 default_section_type_flags (tree decl, const char *name, int reloc)
6711 {
6712 unsigned int flags;
6713
6714 if (decl && TREE_CODE (decl) == FUNCTION_DECL)
6715 flags = SECTION_CODE;
6716 else if (decl)
6717 {
6718 enum section_category category
6719 = categorize_decl_for_section (decl, reloc);
6720 if (decl_readonly_section_1 (category))
6721 flags = 0;
6722 else if (category == SECCAT_DATA_REL_RO
6723 || category == SECCAT_DATA_REL_RO_LOCAL)
6724 flags = SECTION_WRITE | SECTION_RELRO;
6725 else
6726 flags = SECTION_WRITE;
6727 }
6728 else
6729 {
6730 flags = SECTION_WRITE;
6731 if (strcmp (name, ".data.rel.ro") == 0
6732 || strcmp (name, ".data.rel.ro.local") == 0)
6733 flags |= SECTION_RELRO;
6734 }
6735
6736 if (decl && DECL_P (decl) && DECL_COMDAT_GROUP (decl))
6737 flags |= SECTION_LINKONCE;
6738
6739 if (strcmp (name, ".vtable_map_vars") == 0)
6740 flags |= SECTION_LINKONCE;
6741
6742 if (decl && VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
6743 flags |= SECTION_TLS | SECTION_WRITE;
6744
6745 if (strcmp (name, ".bss") == 0
6746 || startswith (name, ".bss.")
6747 || startswith (name, ".gnu.linkonce.b.")
6748 || strcmp (name, ".persistent.bss") == 0
6749 || strcmp (name, ".sbss") == 0
6750 || startswith (name, ".sbss.")
6751 || startswith (name, ".gnu.linkonce.sb."))
6752 flags |= SECTION_BSS;
6753
6754 if (strcmp (name, ".tdata") == 0
6755 || startswith (name, ".tdata.")
6756 || startswith (name, ".gnu.linkonce.td."))
6757 flags |= SECTION_TLS;
6758
6759 if (strcmp (name, ".tbss") == 0
6760 || startswith (name, ".tbss.")
6761 || startswith (name, ".gnu.linkonce.tb."))
6762 flags |= SECTION_TLS | SECTION_BSS;
6763
6764 if (strcmp (name, ".noinit") == 0)
6765 flags |= SECTION_WRITE | SECTION_BSS | SECTION_NOTYPE;
6766
6767 if (strcmp (name, ".persistent") == 0)
6768 flags |= SECTION_WRITE | SECTION_NOTYPE;
6769
6770 /* Various sections have special ELF types that the assembler will
6771 assign by default based on the name. They are neither SHT_PROGBITS
6772 nor SHT_NOBITS, so when changing sections we don't want to print a
6773 section type (@progbits or @nobits). Rather than duplicating the
6774 assembler's knowledge of what those special name patterns are, just
6775 let the assembler choose the type if we don't know a specific
6776 reason to set it to something other than the default. SHT_PROGBITS
6777 is the default for sections whose name is not specially known to
6778 the assembler, so it does no harm to leave the choice to the
6779 assembler when @progbits is the best thing we know to use. If
6780 someone is silly enough to emit code or TLS variables to one of
6781 these sections, then don't handle them specially.
6782
6783 default_elf_asm_named_section (below) handles the BSS, TLS, ENTSIZE, and
6784 LINKONCE cases when NOTYPE is not set, so leave those to its logic. */
6785 if (!(flags & (SECTION_CODE | SECTION_BSS | SECTION_TLS | SECTION_ENTSIZE))
6786 && !(HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE)))
6787 flags |= SECTION_NOTYPE;
6788
6789 return flags;
6790 }
6791
6792 /* Return true if the target supports some form of global BSS,
6793 either through bss_noswitch_section, or by selecting a BSS
6794 section in TARGET_ASM_SELECT_SECTION. */
6795
6796 bool
6797 have_global_bss_p (void)
6798 {
6799 return bss_noswitch_section || targetm.have_switchable_bss_sections;
6800 }
6801
6802 /* Output assembly to switch to section NAME with attribute FLAGS.
6803 Four variants for common object file formats. */
6804
6805 void
6806 default_no_named_section (const char *name ATTRIBUTE_UNUSED,
6807 unsigned int flags ATTRIBUTE_UNUSED,
6808 tree decl ATTRIBUTE_UNUSED)
6809 {
6810 /* Some object formats don't support named sections at all. The
6811 front-end should already have flagged this as an error. */
6812 gcc_unreachable ();
6813 }
6814
6815 #ifndef TLS_SECTION_ASM_FLAG
6816 #define TLS_SECTION_ASM_FLAG 'T'
6817 #endif
6818
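/* Editor's note: as an illustration, a COMDAT function FOO placed in its
   own section typically yields a directive such as
       .section .text.foo,"axG",@progbits,foo,comdat
   while switching back to an already-declared ordinary section can use the
   abbreviated form ".section .text.foo" handled just below.  */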
6819 void
6820 default_elf_asm_named_section (const char *name, unsigned int flags,
6821 tree decl)
6822 {
6823 char flagchars[11], *f = flagchars;
6824 unsigned int numeric_value = 0;
6825
6826 /* If we have already declared this section, we can use an
6827 abbreviated form to switch back to it -- unless this section is
6828 part of a COMDAT group or carries SHF_GNU_RETAIN or SHF_LINK_ORDER,
6829 in which case GAS requires the full declaration every time. */
6830 if (!(HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
6831 && !(flags & (SECTION_RETAIN | SECTION_LINK_ORDER))
6832 && (flags & SECTION_DECLARED))
6833 {
6834 fprintf (asm_out_file, "\t.section\t%s\n", name);
6835 return;
6836 }
6837
6838 /* If we have a machine specific flag, then use the numeric value to pass
6839 this on to GAS. */
6840 if (targetm.asm_out.elf_flags_numeric (flags, &numeric_value))
6841 snprintf (f, sizeof (flagchars), "0x%08x", numeric_value);
6842 else
6843 {
6844 if (!(flags & SECTION_DEBUG))
6845 *f++ = 'a';
6846 #if HAVE_GAS_SECTION_EXCLUDE
6847 if (flags & SECTION_EXCLUDE)
6848 *f++ = 'e';
6849 #endif
6850 if (flags & SECTION_WRITE)
6851 *f++ = 'w';
6852 if (flags & SECTION_CODE)
6853 *f++ = 'x';
6854 if (flags & SECTION_SMALL)
6855 *f++ = 's';
6856 if (flags & SECTION_MERGE)
6857 *f++ = 'M';
6858 if (flags & SECTION_STRINGS)
6859 *f++ = 'S';
6860 if (flags & SECTION_TLS)
6861 *f++ = TLS_SECTION_ASM_FLAG;
6862 if (HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
6863 *f++ = 'G';
6864 if (flags & SECTION_RETAIN)
6865 *f++ = 'R';
6866 if (flags & SECTION_LINK_ORDER)
6867 *f++ = 'o';
6868 #ifdef MACH_DEP_SECTION_ASM_FLAG
6869 if (flags & SECTION_MACH_DEP)
6870 *f++ = MACH_DEP_SECTION_ASM_FLAG;
6871 #endif
6872 *f = '\0';
6873 }
6874
6875 fprintf (asm_out_file, "\t.section\t%s,\"%s\"", name, flagchars);
6876
6877 /* default_section_type_flags (above) knows which flags need special
6878 handling here, and sets NOTYPE when none of these apply so that the
6879 assembler's logic for default types can apply to user-chosen
6880 section names. */
6881 if (!(flags & SECTION_NOTYPE))
6882 {
6883 const char *type;
6884 const char *format;
6885
6886 if (flags & SECTION_BSS)
6887 type = "nobits";
6888 else
6889 type = "progbits";
6890
6891 format = ",@%s";
6892 /* On platforms that use "@" as the assembly comment character,
6893 use "%" instead. */
6894 if (strcmp (ASM_COMMENT_START, "@") == 0)
6895 format = ",%%%s";
6896 fprintf (asm_out_file, format, type);
6897
6898 if (flags & SECTION_ENTSIZE)
6899 fprintf (asm_out_file, ",%d", flags & SECTION_ENTSIZE);
6900 if (flags & SECTION_LINK_ORDER)
6901 {
6902 tree id = DECL_ASSEMBLER_NAME (decl);
6903 ultimate_transparent_alias_target (&id);
6904 const char *name = IDENTIFIER_POINTER (id);
6905 name = targetm.strip_name_encoding (name);
6906 fprintf (asm_out_file, ",%s", name);
6907 }
6908 if (HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
6909 {
6910 if (TREE_CODE (decl) == IDENTIFIER_NODE)
6911 fprintf (asm_out_file, ",%s,comdat", IDENTIFIER_POINTER (decl));
6912 else
6913 fprintf (asm_out_file, ",%s,comdat",
6914 IDENTIFIER_POINTER (DECL_COMDAT_GROUP (decl)));
6915 }
6916 }
6917
6918 putc ('\n', asm_out_file);
6919 }
6920
6921 void
6922 default_coff_asm_named_section (const char *name, unsigned int flags,
6923 tree decl ATTRIBUTE_UNUSED)
6924 {
6925 char flagchars[8], *f = flagchars;
6926
6927 if (flags & SECTION_WRITE)
6928 *f++ = 'w';
6929 if (flags & SECTION_CODE)
6930 *f++ = 'x';
6931 *f = '\0';
6932
6933 fprintf (asm_out_file, "\t.section\t%s,\"%s\"\n", name, flagchars);
6934 }
6935
6936 void
6937 default_pe_asm_named_section (const char *name, unsigned int flags,
6938 tree decl)
6939 {
6940 default_coff_asm_named_section (name, flags, decl);
6941
6942 if (flags & SECTION_LINKONCE)
6943 {
6944 /* Functions may have been compiled at various levels of
6945 optimization so we can't use `same_size' here.
6946 Instead, have the linker pick one. */
6947 fprintf (asm_out_file, "\t.linkonce %s\n",
6948 (flags & SECTION_CODE ? "discard" : "same_size"));
6949 }
6950 }
6951 \f
6952 /* The lame default section selector. */
6953
6954 section *
6955 default_select_section (tree decl, int reloc,
6956 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
6957 {
6958 if (DECL_P (decl))
6959 {
6960 if (decl_readonly_section (decl, reloc))
6961 return readonly_data_section;
6962 }
6963 else if (TREE_CODE (decl) == CONSTRUCTOR)
6964 {
6965 if (! ((flag_pic && reloc)
6966 || !TREE_READONLY (decl)
6967 || TREE_SIDE_EFFECTS (decl)
6968 || !TREE_CONSTANT (decl)))
6969 return readonly_data_section;
6970 }
6971 else if (TREE_CODE (decl) == STRING_CST)
6972 return readonly_data_section;
6973 else if (! (flag_pic && reloc))
6974 return readonly_data_section;
6975
6976 return data_section;
6977 }
6978
6979 enum section_category
6980 categorize_decl_for_section (const_tree decl, int reloc)
6981 {
6982 enum section_category ret;
6983
6984 if (TREE_CODE (decl) == FUNCTION_DECL)
6985 return SECCAT_TEXT;
6986 else if (TREE_CODE (decl) == STRING_CST)
6987 {
6988 if ((flag_sanitize & SANITIZE_ADDRESS)
6989 && asan_protect_global (CONST_CAST_TREE (decl)))
6990 /* or !flag_merge_constants */
6991 return SECCAT_RODATA;
6992 else
6993 return SECCAT_RODATA_MERGE_STR;
6994 }
6995 else if (VAR_P (decl))
6996 {
6997 tree d = CONST_CAST_TREE (decl);
6998 if (bss_initializer_p (decl))
6999 ret = SECCAT_BSS;
7000 else if (! TREE_READONLY (decl)
7001 || TREE_SIDE_EFFECTS (decl)
7002 || (DECL_INITIAL (decl)
7003 && ! TREE_CONSTANT (DECL_INITIAL (decl))))
7004 {
7005 /* Here the reloc_rw_mask is not testing whether the section should
7006 be read-only or not, but whether the dynamic link will have to
7007 do something. If so, we wish to segregate the data in order to
7008 minimize cache misses inside the dynamic linker. */
7009 if (reloc & targetm.asm_out.reloc_rw_mask ())
7010 ret = reloc == 1 ? SECCAT_DATA_REL_LOCAL : SECCAT_DATA_REL;
7011 else
7012 ret = SECCAT_DATA;
7013 }
7014 else if (reloc & targetm.asm_out.reloc_rw_mask ())
7015 ret = reloc == 1 ? SECCAT_DATA_REL_RO_LOCAL : SECCAT_DATA_REL_RO;
7016 else if (reloc || flag_merge_constants < 2
7017 || ((flag_sanitize & SANITIZE_ADDRESS)
7018 /* PR 81697: for architectures that use section anchors we
7019 need to ignore DECL_RTL_SET_P (decl) for string constants
7020 inside this asan_protect_global call because otherwise
7021 we'll wrongly put them into SECCAT_RODATA_MERGE_CONST
7022 section, set DECL_RTL (decl) later on and add DECL to
7023 protected globals via successive asan_protect_global
7024 calls. In this scenario we'll end up with wrong
7025 alignment of these strings at runtime and possible ASan
7026 false positives. */
7027 && asan_protect_global (d, use_object_blocks_p ()
7028 && use_blocks_for_decl_p (d))))
7029 /* C and C++ don't allow different variables to share the same
7030 location. -fmerge-all-constants allows even that (at the
7031 expense of not conforming). */
7032 ret = SECCAT_RODATA;
7033 else if (DECL_INITIAL (decl)
7034 && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST)
7035 ret = SECCAT_RODATA_MERGE_STR_INIT;
7036 else
7037 ret = SECCAT_RODATA_MERGE_CONST;
7038 }
7039 else if (TREE_CODE (decl) == CONSTRUCTOR)
7040 {
7041 if ((reloc & targetm.asm_out.reloc_rw_mask ())
7042 || TREE_SIDE_EFFECTS (decl)
7043 || ! TREE_CONSTANT (decl))
7044 ret = SECCAT_DATA;
7045 else
7046 ret = SECCAT_RODATA;
7047 }
7048 else
7049 ret = SECCAT_RODATA;
7050
7051 /* There are no read-only thread-local sections. */
7052 if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
7053 {
7054 /* Note that this would be *just* SECCAT_BSS, except that there's
7055 no concept of a read-only thread-local-data section. */
7056 if (ret == SECCAT_BSS
7057 || DECL_INITIAL (decl) == NULL
7058 || (flag_zero_initialized_in_bss
7059 && initializer_zerop (DECL_INITIAL (decl))))
7060 ret = SECCAT_TBSS;
7061 else
7062 ret = SECCAT_TDATA;
7063 }
7064
7065 /* If the target uses small data sections, select it. */
7066 else if (targetm.in_small_data_p (decl))
7067 {
7068 if (ret == SECCAT_BSS)
7069 ret = SECCAT_SBSS;
7070 else if (targetm.have_srodata_section && ret == SECCAT_RODATA)
7071 ret = SECCAT_SRODATA;
7072 else
7073 ret = SECCAT_SDATA;
7074 }
7075
7076 return ret;
7077 }
7078
7079 static bool
7080 decl_readonly_section_1 (enum section_category category)
7081 {
7082 switch (category)
7083 {
7084 case SECCAT_RODATA:
7085 case SECCAT_RODATA_MERGE_STR:
7086 case SECCAT_RODATA_MERGE_STR_INIT:
7087 case SECCAT_RODATA_MERGE_CONST:
7088 case SECCAT_SRODATA:
7089 return true;
7090 default:
7091 return false;
7092 }
7093 }
7094
7095 bool
7096 decl_readonly_section (const_tree decl, int reloc)
7097 {
7098 return decl_readonly_section_1 (categorize_decl_for_section (decl, reloc));
7099 }
7100
7101 /* Select a section based on the above categorization. */
7102
7103 section *
7104 default_elf_select_section (tree decl, int reloc,
7105 unsigned HOST_WIDE_INT align)
7106 {
7107 const char *sname;
7108
7109 switch (categorize_decl_for_section (decl, reloc))
7110 {
7111 case SECCAT_TEXT:
7112 /* We're not supposed to be called on FUNCTION_DECLs. */
7113 gcc_unreachable ();
7114 case SECCAT_RODATA:
7115 return readonly_data_section;
7116 case SECCAT_RODATA_MERGE_STR:
7117 return mergeable_string_section (decl, align, 0);
7118 case SECCAT_RODATA_MERGE_STR_INIT:
7119 return mergeable_string_section (DECL_INITIAL (decl), align, 0);
7120 case SECCAT_RODATA_MERGE_CONST:
7121 return mergeable_constant_section (DECL_MODE (decl), align, 0);
7122 case SECCAT_SRODATA:
7123 sname = ".sdata2";
7124 break;
7125 case SECCAT_DATA:
7126 if (DECL_P (decl) && DECL_PERSISTENT_P (decl))
7127 {
7128 sname = ".persistent";
7129 break;
7130 }
7131 return data_section;
7132 case SECCAT_DATA_REL:
7133 sname = ".data.rel";
7134 break;
7135 case SECCAT_DATA_REL_LOCAL:
7136 sname = ".data.rel.local";
7137 break;
7138 case SECCAT_DATA_REL_RO:
7139 sname = ".data.rel.ro";
7140 break;
7141 case SECCAT_DATA_REL_RO_LOCAL:
7142 sname = ".data.rel.ro.local";
7143 break;
7144 case SECCAT_SDATA:
7145 sname = ".sdata";
7146 break;
7147 case SECCAT_TDATA:
7148 sname = ".tdata";
7149 break;
7150 case SECCAT_BSS:
7151 if (DECL_P (decl) && DECL_NOINIT_P (decl))
7152 {
7153 sname = ".noinit";
7154 break;
7155 }
7156 if (bss_section)
7157 return bss_section;
7158 sname = ".bss";
7159 break;
7160 case SECCAT_SBSS:
7161 sname = ".sbss";
7162 break;
7163 case SECCAT_TBSS:
7164 sname = ".tbss";
7165 break;
7166 default:
7167 gcc_unreachable ();
7168 }
7169
7170 return get_named_section (decl, sname, reloc);
7171 }
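
/* As a rough illustration (target- and option-dependent; relocations,
   small data and sanitizers can all change the outcome), on a typical
   ELF target the categorization above maps declarations as follows:

       int scratch;                    SECCAT_BSS      ->  .bss
       int counter = 1;                SECCAT_DATA     ->  .data
       const int answer = 42;          SECCAT_RODATA   ->  .rodata
       __thread int tls_counter = 1;   SECCAT_TDATA    ->  .tdata
       __thread int tls_scratch;       SECCAT_TBSS     ->  .tbss

   With -fmerge-all-constants a string-initialized read-only variable may
   instead be given a mergeable string section such as .rodata.str1.1.  */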
7172
7173 /* Construct a unique section name based on the decl name and the
7174 categorization performed above. */
7175
7176 void
7177 default_unique_section (tree decl, int reloc)
7178 {
7179 /* We only need to use .gnu.linkonce if we don't have COMDAT groups. */
7180 bool one_only = DECL_ONE_ONLY (decl) && !HAVE_COMDAT_GROUP;
7181 const char *prefix, *name, *linkonce;
7182 char *string;
7183 tree id;
7184
7185 switch (categorize_decl_for_section (decl, reloc))
7186 {
7187 case SECCAT_TEXT:
7188 prefix = one_only ? ".t" : ".text";
7189 break;
7190 case SECCAT_RODATA:
7191 case SECCAT_RODATA_MERGE_STR:
7192 case SECCAT_RODATA_MERGE_STR_INIT:
7193 case SECCAT_RODATA_MERGE_CONST:
7194 prefix = one_only ? ".r" : ".rodata";
7195 break;
7196 case SECCAT_SRODATA:
7197 prefix = one_only ? ".s2" : ".sdata2";
7198 break;
7199 case SECCAT_DATA:
7200 prefix = one_only ? ".d" : ".data";
7201 if (DECL_P (decl) && DECL_PERSISTENT_P (decl))
7202 {
7203 prefix = one_only ? ".p" : ".persistent";
7204 break;
7205 }
7206 break;
7207 case SECCAT_DATA_REL:
7208 prefix = one_only ? ".d.rel" : ".data.rel";
7209 break;
7210 case SECCAT_DATA_REL_LOCAL:
7211 prefix = one_only ? ".d.rel.local" : ".data.rel.local";
7212 break;
7213 case SECCAT_DATA_REL_RO:
7214 prefix = one_only ? ".d.rel.ro" : ".data.rel.ro";
7215 break;
7216 case SECCAT_DATA_REL_RO_LOCAL:
7217 prefix = one_only ? ".d.rel.ro.local" : ".data.rel.ro.local";
7218 break;
7219 case SECCAT_SDATA:
7220 prefix = one_only ? ".s" : ".sdata";
7221 break;
7222 case SECCAT_BSS:
7223 if (DECL_P (decl) && DECL_NOINIT_P (decl))
7224 {
7225 prefix = one_only ? ".n" : ".noinit";
7226 break;
7227 }
7228 prefix = one_only ? ".b" : ".bss";
7229 break;
7230 case SECCAT_SBSS:
7231 prefix = one_only ? ".sb" : ".sbss";
7232 break;
7233 case SECCAT_TDATA:
7234 prefix = one_only ? ".td" : ".tdata";
7235 break;
7236 case SECCAT_TBSS:
7237 prefix = one_only ? ".tb" : ".tbss";
7238 break;
7239 default:
7240 gcc_unreachable ();
7241 }
7242
7243 id = DECL_ASSEMBLER_NAME (decl);
7244 ultimate_transparent_alias_target (&id);
7245 name = IDENTIFIER_POINTER (id);
7246 name = targetm.strip_name_encoding (name);
7247
7248 /* If we're using one_only, then there needs to be a .gnu.linkonce
7249 prefix to the section name. */
7250 linkonce = one_only ? ".gnu.linkonce" : "";
7251
7252 string = ACONCAT ((linkonce, prefix, ".", name, NULL));
7253
7254 set_decl_section_name (decl, string);
7255 }
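
/* For illustration only: with -ffunction-sections and -fdata-sections a
   function "foo" and a writable variable "bar" would typically be given
   the sections ".text.foo" and ".data.bar" by the code above, while on a
   target lacking COMDAT group support a one-only function instead gets
   ".gnu.linkonce.t.foo".  The exact prefix follows the categorization
   performed by categorize_decl_for_section.  */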
7256
7257 /* Subroutine of compute_reloc_for_rtx for leaf rtxes. */
7258
7259 static int
7260 compute_reloc_for_rtx_1 (const_rtx x)
7261 {
7262 switch (GET_CODE (x))
7263 {
7264 case SYMBOL_REF:
7265 return SYMBOL_REF_LOCAL_P (x) ? 1 : 2;
7266 case LABEL_REF:
7267 return 1;
7268 default:
7269 return 0;
7270 }
7271 }
7272
7273 /* Like compute_reloc_for_constant, except for an RTX. The return value
7274 is a mask for which bit 1 indicates a global relocation, and bit 0
7275 indicates a local relocation. Used by default_select_rtx_section
7276 and default_elf_select_rtx_section. */
7277
7278 static int
7279 compute_reloc_for_rtx (const_rtx x)
7280 {
7281 switch (GET_CODE (x))
7282 {
7283 case SYMBOL_REF:
7284 case LABEL_REF:
7285 return compute_reloc_for_rtx_1 (x);
7286
7287 case CONST:
7288 {
7289 int reloc = 0;
7290 subrtx_iterator::array_type array;
7291 FOR_EACH_SUBRTX (iter, array, x, ALL)
7292 reloc |= compute_reloc_for_rtx_1 (*iter);
7293 return reloc;
7294 }
7295
7296 default:
7297 return 0;
7298 }
7299 }
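
/* As an example of the mask: (const (plus (symbol_ref "x") (const_int 4)))
   yields 1 when "x" binds locally and 2 when it does not; a constant that
   referred to both a local and a global symbol would combine the two bits
   and yield 3.  */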
7300
7301 section *
7302 default_select_rtx_section (machine_mode mode ATTRIBUTE_UNUSED,
7303 rtx x,
7304 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
7305 {
7306 if (compute_reloc_for_rtx (x) & targetm.asm_out.reloc_rw_mask ())
7307 return data_section;
7308 else
7309 return readonly_data_section;
7310 }
7311
7312 section *
7313 default_elf_select_rtx_section (machine_mode mode, rtx x,
7314 unsigned HOST_WIDE_INT align)
7315 {
7316 int reloc = compute_reloc_for_rtx (x);
7317
7318 /* ??? Handle small data here somehow. */
7319
7320 if (reloc & targetm.asm_out.reloc_rw_mask ())
7321 {
7322 if (reloc == 1)
7323 return get_named_section (NULL, ".data.rel.ro.local", 1);
7324 else
7325 return get_named_section (NULL, ".data.rel.ro", 3);
7326 }
7327
7328 return mergeable_constant_section (mode, align, 0);
7329 }
7330
7331 /* Set the generally applicable flags on the SYMBOL_REF for EXP. */
7332
7333 void
7334 default_encode_section_info (tree decl, rtx rtl, int first ATTRIBUTE_UNUSED)
7335 {
7336 rtx symbol;
7337 int flags;
7338
7339 /* Careful not to prod global register variables. */
7340 if (!MEM_P (rtl))
7341 return;
7342 symbol = XEXP (rtl, 0);
7343 if (GET_CODE (symbol) != SYMBOL_REF)
7344 return;
7345
7346 flags = SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_HAS_BLOCK_INFO;
7347 if (TREE_CODE (decl) == FUNCTION_DECL)
7348 flags |= SYMBOL_FLAG_FUNCTION;
7349 if (targetm.binds_local_p (decl))
7350 flags |= SYMBOL_FLAG_LOCAL;
7351 if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
7352 flags |= DECL_TLS_MODEL (decl) << SYMBOL_FLAG_TLS_SHIFT;
7353 else if (targetm.in_small_data_p (decl))
7354 flags |= SYMBOL_FLAG_SMALL;
7355 /* ??? Why is DECL_EXTERNAL ever set for non-PUBLIC names? Without
7356 being PUBLIC, the thing *must* be defined in this translation unit.
7357 Prevent this buglet from being propagated into rtl code as well. */
7358 if (DECL_P (decl) && DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
7359 flags |= SYMBOL_FLAG_EXTERNAL;
7360
7361 SYMBOL_REF_FLAGS (symbol) = flags;
7362 }
7363
7364 /* By default, we do nothing for encode_section_info, so we need not
7365 do anything but discard the '*' marker. */
7366
7367 const char *
7368 default_strip_name_encoding (const char *str)
7369 {
7370 return str + (*str == '*');
7371 }
7372
7373 #ifdef ASM_OUTPUT_DEF
7374 /* The default implementation of TARGET_ASM_OUTPUT_ANCHOR. Define the
7375 anchor relative to ".", the current section position. */
7376
7377 void
7378 default_asm_output_anchor (rtx symbol)
7379 {
7380 char buffer[100];
7381
7382 sprintf (buffer, "*. + " HOST_WIDE_INT_PRINT_DEC,
7383 SYMBOL_REF_BLOCK_OFFSET (symbol));
7384 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
7385 }
7386 #endif
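
/* On targets whose ASM_OUTPUT_DEF uses the common ".set" form, the anchor
   definition above appears in the assembly output roughly as

       .set .LANCHOR0, . + 16

   i.e. the anchor is simply the current section position plus the block
   offset recorded in SYMBOL_REF_BLOCK_OFFSET.  (Illustrative syntax only;
   the exact directive is target-defined.)  */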
7387
7388 /* The default implementation of TARGET_USE_ANCHORS_FOR_SYMBOL_P. */
7389
7390 bool
7391 default_use_anchors_for_symbol_p (const_rtx symbol)
7392 {
7393 tree decl;
7394 section *sect = SYMBOL_REF_BLOCK (symbol)->sect;
7395
7396 /* This function should only be called with non-zero SYMBOL_REF_BLOCK;
7397 furthermore, get_block_for_section should not create object blocks
7398 for mergeable sections. */
7399 gcc_checking_assert (sect && !(sect->common.flags & SECTION_MERGE));
7400
7401 /* Don't use anchors for small data sections. The small data register
7402 acts as an anchor for such sections. */
7403 if (sect->common.flags & SECTION_SMALL)
7404 return false;
7405
7406 decl = SYMBOL_REF_DECL (symbol);
7407 if (decl && DECL_P (decl))
7408 {
7409 /* Don't use section anchors for decls that might be defined or
7410 usurped by other modules. */
7411 if (TREE_PUBLIC (decl) && !decl_binds_to_current_def_p (decl))
7412 return false;
7413
7414 /* Don't use section anchors for decls that will be placed in a
7415 small data section. */
7416 /* ??? Ideally, this check would be redundant with the SECTION_SMALL
7417 one above. The problem is that we only use SECTION_SMALL for
7418 sections that should be marked as small in the section directive. */
7419 if (targetm.in_small_data_p (decl))
7420 return false;
7421
7422 /* Don't use section anchors for decls that won't fit inside a single
7423 anchor range, to reduce the number of instructions required to refer
7424 to the entire declaration. */
7425 if (DECL_SIZE_UNIT (decl) == NULL_TREE
7426 || !tree_fits_uhwi_p (DECL_SIZE_UNIT (decl))
7427 || (tree_to_uhwi (DECL_SIZE_UNIT (decl))
7428 >= (unsigned HOST_WIDE_INT) targetm.max_anchor_offset))
7429 return false;
7430
7431 }
7432 return true;
7433 }
7434
7435 /* Return true when RESOLUTION indicates that the symbol will be bound to
7436 the definition provided by the current .o file. */
7437
7438 static bool
7439 resolution_to_local_definition_p (enum ld_plugin_symbol_resolution resolution)
7440 {
7441 return (resolution == LDPR_PREVAILING_DEF
7442 || resolution == LDPR_PREVAILING_DEF_IRONLY_EXP
7443 || resolution == LDPR_PREVAILING_DEF_IRONLY);
7444 }
7445
7446 /* Return true when RESOLUTION indicates that the symbol will be bound
7447 locally within the current executable or DSO. */
7448
7449 static bool
7450 resolution_local_p (enum ld_plugin_symbol_resolution resolution)
7451 {
7452 return (resolution == LDPR_PREVAILING_DEF
7453 || resolution == LDPR_PREVAILING_DEF_IRONLY
7454 || resolution == LDPR_PREVAILING_DEF_IRONLY_EXP
7455 || resolution == LDPR_PREEMPTED_REG
7456 || resolution == LDPR_PREEMPTED_IR
7457 || resolution == LDPR_RESOLVED_IR
7458 || resolution == LDPR_RESOLVED_EXEC);
7459 }
7460
7461 /* COMMON_LOCAL_P being true means that the linker can guarantee that an
7462 uninitialized common symbol in the executable will still be defined
7463 (through COPY relocation) in the executable. */
7464
7465 bool
7466 default_binds_local_p_3 (const_tree exp, bool shlib, bool weak_dominate,
7467 bool extern_protected_data, bool common_local_p)
7468 {
7469 /* A non-decl is an entry in the constant pool. */
7470 if (!DECL_P (exp))
7471 return true;
7472
7473 /* Weakrefs may not bind locally, even though the weakref itself is always
7474 static and therefore local. Similarly, the resolver for ifunc functions
7475 might resolve to a non-local function.
7476 FIXME: We can resolve the weakref case more carefully by looking at the
7477 weakref alias. */
7478 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (exp))
7479 || (TREE_CODE (exp) == FUNCTION_DECL
7480 && cgraph_node::get (exp)
7481 && cgraph_node::get (exp)->ifunc_resolver))
7482 return false;
7483
7484 /* Static variables are always local. */
7485 if (! TREE_PUBLIC (exp))
7486 return true;
7487
7488 /* With the resolution file in hand, take a look at the resolutions.
7489 We can't just return true for resolved_locally symbols,
7490 because dynamic linking might overwrite symbols
7491 in shared libraries. */
7492 bool resolved_locally = false;
7493
7494 bool uninited_common = (DECL_COMMON (exp)
7495 && (DECL_INITIAL (exp) == NULL
7496 || (!in_lto_p
7497 && DECL_INITIAL (exp) == error_mark_node)));
7498
7499 /* A non-external variable is defined locally only if it isn't an
7500 uninitialized COMMON variable or common_local_p is true. */
7501 bool defined_locally = (!DECL_EXTERNAL (exp)
7502 && (!uninited_common || common_local_p));
7503 if (symtab_node *node = symtab_node::get (exp))
7504 {
7505 if (node->in_other_partition)
7506 defined_locally = true;
7507 if (node->can_be_discarded_p ())
7508 ;
7509 else if (resolution_to_local_definition_p (node->resolution))
7510 defined_locally = resolved_locally = true;
7511 else if (resolution_local_p (node->resolution))
7512 resolved_locally = true;
7513 }
7514 if (defined_locally && weak_dominate && !shlib)
7515 resolved_locally = true;
7516
7517 /* Undefined weak symbols are never defined locally. */
7518 if (DECL_WEAK (exp) && !defined_locally)
7519 return false;
7520
7521 /* A symbol is local if the user has said explicitly that it will be,
7522 or if we have a definition for the symbol. We cannot infer visibility
7523 for undefined symbols. */
7524 if (DECL_VISIBILITY (exp) != VISIBILITY_DEFAULT
7525 && (TREE_CODE (exp) == FUNCTION_DECL
7526 || !extern_protected_data
7527 || DECL_VISIBILITY (exp) != VISIBILITY_PROTECTED)
7528 && (DECL_VISIBILITY_SPECIFIED (exp) || defined_locally))
7529 return true;
7530
7531 /* If PIC, then assume that any global name can be overridden by
7532 symbols resolved from other modules. */
7533 if (shlib)
7534 return false;
7535
7536 /* Variables defined outside this object might not be local. */
7537 if (DECL_EXTERNAL (exp) && !resolved_locally)
7538 return false;
7539
7540 /* Non-dominant weak symbols are not defined locally. */
7541 if (DECL_WEAK (exp) && !resolved_locally)
7542 return false;
7543
7544 /* An uninitialized COMMON variable may be unified with symbols
7545 resolved from other modules. */
7546 if (uninited_common && !resolved_locally)
7547 return false;
7548
7549 /* Otherwise we're left with initialized (or non-common) global data
7550 which is of necessity defined locally. */
7551 return true;
7552 }
7553
7554 /* Assume ELF-ish defaults, since that's pretty much the most liberal
7555 wrt cross-module name binding. */
7556
7557 bool
7558 default_binds_local_p (const_tree exp)
7559 {
7560 return default_binds_local_p_3 (exp, flag_shlib != 0, true, false, false);
7561 }
7562
7563 /* Similar to default_binds_local_p, but a common symbol may be local and
7564 extern protected data is non-local. */
7565
7566 bool
7567 default_binds_local_p_2 (const_tree exp)
7568 {
7569 return default_binds_local_p_3 (exp, flag_shlib != 0, true, true,
7570 !flag_pic);
7571 }
7572
7573 bool
7574 default_binds_local_p_1 (const_tree exp, int shlib)
7575 {
7576 return default_binds_local_p_3 (exp, shlib != 0, false, false, false);
7577 }
7578
7579 /* Return true when references to DECL must bind to the current definition
7580 in the final executable.
7581
7582 The condition is usually equivalent to whether the function binds to the
7583 current module (shared library or executable), that is to binds_local_p.
7584 We use this fact to avoid the need for another target hook and implement
7585 the logic using binds_local_p plus the special cases where
7586 decl_binds_to_current_def_p is stronger than binds_local_p; in particular,
7587 weak definitions (which can be overwritten at link time by another
7588 definition from a different object file). When resolution info is
7589 available we simply use the knowledge passed to us by the linker plugin. */
7590 bool
7591 decl_binds_to_current_def_p (const_tree decl)
7592 {
7593 gcc_assert (DECL_P (decl));
7594 if (!targetm.binds_local_p (decl))
7595 return false;
7596 if (!TREE_PUBLIC (decl))
7597 return true;
7598
7599 /* When resolution is available, just use it. */
7600 if (symtab_node *node = symtab_node::get (decl))
7601 {
7602 if (node->resolution != LDPR_UNKNOWN
7603 && !node->can_be_discarded_p ())
7604 return resolution_to_local_definition_p (node->resolution);
7605 }
7606
7607 /* Otherwise we have to assume the worst for DECL_WEAK (hidden weaks
7608 bind locally but can still be overwritten), DECL_COMMON (can be merged
7609 with a non-common definition somewhere in the same module) or
7610 DECL_EXTERNAL.
7611 This relies on the fact that binds_local_p behaves as decl_replaceable_p
7612 for all other declaration types. */
7613 if (DECL_WEAK (decl))
7614 return false;
7615 if (DECL_COMMON (decl)
7616 && (DECL_INITIAL (decl) == NULL
7617 || (!in_lto_p && DECL_INITIAL (decl) == error_mark_node)))
7618 return false;
7619 if (DECL_EXTERNAL (decl))
7620 return false;
7621 return true;
7622 }
7623
7624 /* A replaceable function or variable is one which may be replaced
7625 at link-time with an entirely different definition, provided that the
7626 replacement has the same type. For example, functions declared
7627 with __attribute__((weak)) on most systems are replaceable.
7628 If SEMANTIC_INTERPOSITION_P is false allow interposition only on
7629 symbols explicitly declared weak.
7630
7631 COMDAT functions are not replaceable, since all definitions of the
7632 function must be equivalent. It is important that COMDAT functions
7633 not be treated as replaceable so that use of C++ template
7634 instantiations is not penalized. */
7635
7636 bool
7637 decl_replaceable_p (tree decl, bool semantic_interposition_p)
7638 {
7639 gcc_assert (DECL_P (decl));
7640 if (!TREE_PUBLIC (decl) || DECL_COMDAT (decl))
7641 return false;
7642 if (!semantic_interposition_p
7643 && !DECL_WEAK (decl))
7644 return false;
7645 return !decl_binds_to_current_def_p (decl);
7646 }
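
/* For example, a definition such as

       __attribute__((weak)) int fallback (void) { return 0; }

   is replaceable: the linker may prefer a strong definition from another
   object file.  A COMDAT definition (e.g. an implicitly instantiated C++
   template function) is not treated as replaceable, for the reason given
   above.  */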
7647
7648 /* Default function to output code that will globalize a label. A
7649 target must define GLOBAL_ASM_OP or provide its own function to
7650 globalize a label. */
7651 #ifdef GLOBAL_ASM_OP
7652 void
7653 default_globalize_label (FILE * stream, const char *name)
7654 {
7655 fputs (GLOBAL_ASM_OP, stream);
7656 assemble_name (stream, name);
7657 putc ('\n', stream);
7658 }
7659 #endif /* GLOBAL_ASM_OP */
7660
7661 /* Default function to output code that will globalize a declaration. */
7662 void
7663 default_globalize_decl_name (FILE * stream, tree decl)
7664 {
7665 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
7666 targetm.asm_out.globalize_label (stream, name);
7667 }
7668
7669 /* Default function to output a label for unwind information. The
7670 default is to do nothing. A target that needs nonlocal labels for
7671 unwind information must provide its own function to do this. */
7672 void
7673 default_emit_unwind_label (FILE * stream ATTRIBUTE_UNUSED,
7674 tree decl ATTRIBUTE_UNUSED,
7675 int for_eh ATTRIBUTE_UNUSED,
7676 int empty ATTRIBUTE_UNUSED)
7677 {
7678 }
7679
7680 /* Default function to output a label to divide up the exception table.
7681 The default is to do nothing. A target that needs/wants to divide
7682 up the table must provide its own function to do this. */
7683 void
7684 default_emit_except_table_label (FILE * stream ATTRIBUTE_UNUSED)
7685 {
7686 }
7687
7688 /* This is how to output an internal numbered label where PREFIX is
7689 the class of label and LABELNO is the number within the class. */
7690
7691 void
7692 default_generate_internal_label (char *buf, const char *prefix,
7693 unsigned long labelno)
7694 {
7695 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, labelno);
7696 }
7697
7698 /* This is how to output an internal numbered label where PREFIX is
7699 the class of label and LABELNO is the number within the class. */
7700
7701 void
7702 default_internal_label (FILE *stream, const char *prefix,
7703 unsigned long labelno)
7704 {
7705 char *const buf = (char *) alloca (40 + strlen (prefix));
7706 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, labelno);
7707 ASM_OUTPUT_INTERNAL_LABEL (stream, buf);
7708 }
7709
7710
7711 /* The default implementation of ASM_DECLARE_CONSTANT_NAME. */
7712
7713 void
7714 default_asm_declare_constant_name (FILE *file, const char *name,
7715 const_tree exp ATTRIBUTE_UNUSED,
7716 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
7717 {
7718 assemble_label (file, name);
7719 }
7720
7721 /* This is the default behavior at the beginning of a file. It's
7722 controlled by two other target-hook toggles. */
7723 void
7724 default_file_start (void)
7725 {
7726 if (targetm.asm_file_start_app_off
7727 && !(flag_verbose_asm || flag_debug_asm || flag_dump_rtl_in_asm))
7728 fputs (ASM_APP_OFF, asm_out_file);
7729
7730 if (targetm.asm_file_start_file_directive)
7731 {
7732 /* LTO produced units have no meaningful main_input_filename. */
7733 if (in_lto_p)
7734 output_file_directive (asm_out_file, "<artificial>");
7735 else
7736 output_file_directive (asm_out_file, main_input_filename);
7737 }
7738 }
7739
7740 /* This is a generic routine suitable for use as TARGET_ASM_FILE_END
7741 which emits a special section directive used to indicate whether or
7742 not this object file needs an executable stack. This is primarily
7743 a GNU extension to ELF but could be used on other targets. */
7744
7745 int trampolines_created;
7746
7747 void
7748 file_end_indicate_exec_stack (void)
7749 {
7750 unsigned int flags = SECTION_DEBUG;
7751 if (trampolines_created)
7752 flags |= SECTION_CODE;
7753
7754 switch_to_section (get_section (".note.GNU-stack", flags, NULL));
7755 }
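
/* On a typical GNU/Linux target this amounts to emitting a directive
   along the lines of

       .section .note.GNU-stack,"",@progbits

   (with "x" added to the flags when trampolines were created), which the
   linker inspects to decide whether the stack must be executable.  */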
7756
7757 /* Emit a special section directive to indicate that this object file
7758 was compiled with -fsplit-stack. This is used to let the linker
7759 detect calls between split-stack code and non-split-stack code, so
7760 that it can modify the split-stack code to allocate a sufficiently
7761 large stack. We emit another special section if there are any
7762 functions in this file which have the no_split_stack attribute, to
7763 prevent the linker from warning about being unable to convert the
7764 functions if they call non-split-stack code. */
7765
7766 void
7767 file_end_indicate_split_stack (void)
7768 {
7769 if (flag_split_stack)
7770 {
7771 switch_to_section (get_section (".note.GNU-split-stack", SECTION_DEBUG,
7772 NULL));
7773 if (saw_no_split_stack)
7774 switch_to_section (get_section (".note.GNU-no-split-stack",
7775 SECTION_DEBUG, NULL));
7776 }
7777 }
7778
7779 /* Output DIRECTIVE (a C string) followed by a newline. This is used as
7780 a get_unnamed_section callback. */
7781
7782 void
7783 output_section_asm_op (const void *directive)
7784 {
7785 fprintf (asm_out_file, "%s\n", (const char *) directive);
7786 }
7787
7788 /* Emit assembly code to switch to section NEW_SECTION. Do nothing if
7789 the current section is NEW_SECTION. */
7790
7791 void
7792 switch_to_section (section *new_section, tree decl)
7793 {
7794 bool retain_p;
7795 if ((new_section->common.flags & SECTION_NAMED)
7796 && decl != nullptr
7797 && DECL_P (decl)
7798 && ((retain_p = !!lookup_attribute ("retain",
7799 DECL_ATTRIBUTES (decl)))
7800 != !!(new_section->common.flags & SECTION_RETAIN)))
7801 {
7802 /* If the SECTION_RETAIN bit doesn't match, switch to a new
7803 section. */
7804 tree used_decl, no_used_decl;
7805
7806 if (retain_p)
7807 {
7808 new_section->common.flags |= SECTION_RETAIN;
7809 used_decl = decl;
7810 no_used_decl = new_section->named.decl;
7811 }
7812 else
7813 {
7814 new_section->common.flags &= ~(SECTION_RETAIN
7815 | SECTION_DECLARED);
7816 used_decl = new_section->named.decl;
7817 no_used_decl = decl;
7818 }
7819 if (no_used_decl != used_decl)
7820 {
7821 warning (OPT_Wattributes,
7822 "%+qD without %<retain%> attribute and %qD with "
7823 "%<retain%> attribute are placed in a section with "
7824 "the same name", no_used_decl, used_decl);
7825 inform (DECL_SOURCE_LOCATION (used_decl),
7826 "%qD was declared here", used_decl);
7827 }
7828 }
7829 else if (in_section == new_section)
7830 return;
7831
7832 in_section = new_section;
7833
7834 switch (SECTION_STYLE (new_section))
7835 {
7836 case SECTION_NAMED:
7837 targetm.asm_out.named_section (new_section->named.name,
7838 new_section->named.common.flags,
7839 new_section->named.decl);
7840 break;
7841
7842 case SECTION_UNNAMED:
7843 new_section->unnamed.callback (new_section->unnamed.data);
7844 break;
7845
7846 case SECTION_NOSWITCH:
7847 gcc_unreachable ();
7848 break;
7849 }
7850
7851 new_section->common.flags |= SECTION_DECLARED;
7852 }
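
/* For example, assuming an ELF target whose assembler supports
   SHF_GNU_RETAIN, a declaration such as

       __attribute__((retain, used, section (".keep_me"))) static int marker;

   switches to ".keep_me" with SECTION_RETAIN set, and mixing it with a
   non-"retain" declaration placed in a section of the same name triggers
   the -Wattributes warning above.  */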
7853
7854 /* If block symbol SYMBOL has not yet been assigned an offset, place
7855 it at the end of its block. */
7856
7857 void
7858 place_block_symbol (rtx symbol)
7859 {
7860 unsigned HOST_WIDE_INT size, mask, offset;
7861 class constant_descriptor_rtx *desc;
7862 unsigned int alignment;
7863 struct object_block *block;
7864 tree decl;
7865
7866 gcc_assert (SYMBOL_REF_BLOCK (symbol));
7867 if (SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0)
7868 return;
7869
7870 /* Work out the symbol's size and alignment. */
7871 if (CONSTANT_POOL_ADDRESS_P (symbol))
7872 {
7873 desc = SYMBOL_REF_CONSTANT (symbol);
7874 alignment = desc->align;
7875 size = GET_MODE_SIZE (desc->mode);
7876 }
7877 else if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
7878 {
7879 decl = SYMBOL_REF_DECL (symbol);
7880 gcc_checking_assert (DECL_IN_CONSTANT_POOL (decl));
7881 alignment = DECL_ALIGN (decl);
7882 size = get_constant_size (DECL_INITIAL (decl));
7883 if ((flag_sanitize & SANITIZE_ADDRESS)
7884 && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST
7885 && asan_protect_global (DECL_INITIAL (decl)))
7886 {
7887 size += asan_red_zone_size (size);
7888 alignment = MAX (alignment,
7889 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
7890 }
7891 }
7892 else
7893 {
7894 struct symtab_node *snode;
7895 decl = SYMBOL_REF_DECL (symbol);
7896
7897 snode = symtab_node::get (decl);
7898 if (snode->alias)
7899 {
7900 rtx target = DECL_RTL (snode->ultimate_alias_target ()->decl);
7901
7902 gcc_assert (MEM_P (target)
7903 && GET_CODE (XEXP (target, 0)) == SYMBOL_REF
7904 && SYMBOL_REF_HAS_BLOCK_INFO_P (XEXP (target, 0)));
7905 target = XEXP (target, 0);
7906 place_block_symbol (target);
7907 SYMBOL_REF_BLOCK_OFFSET (symbol) = SYMBOL_REF_BLOCK_OFFSET (target);
7908 return;
7909 }
7910 alignment = get_variable_align (decl);
7911 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
7912 if ((flag_sanitize & SANITIZE_ADDRESS)
7913 && asan_protect_global (decl))
7914 {
7915 size += asan_red_zone_size (size);
7916 alignment = MAX (alignment,
7917 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
7918 }
7919 }
7920
7921 /* Calculate the object's offset from the start of the block. */
7922 block = SYMBOL_REF_BLOCK (symbol);
7923 mask = alignment / BITS_PER_UNIT - 1;
7924 offset = (block->size + mask) & ~mask;
7925 SYMBOL_REF_BLOCK_OFFSET (symbol) = offset;
7926
7927 /* Record the block's new alignment and size. */
7928 block->alignment = MAX (block->alignment, alignment);
7929 block->size = offset + size;
7930
7931 vec_safe_push (block->objects, symbol);
7932 }
7933
7934 /* Return the anchor that should be used to address byte offset OFFSET
7935 from the first object in BLOCK. MODEL is the TLS model used
7936 to access it. */
7937
7938 rtx
7939 get_section_anchor (struct object_block *block, HOST_WIDE_INT offset,
7940 enum tls_model model)
7941 {
7942 char label[100];
7943 unsigned int begin, middle, end;
7944 unsigned HOST_WIDE_INT min_offset, max_offset, range, bias, delta;
7945 rtx anchor;
7946
7947 /* Work out the anchor's offset. Use an offset of 0 for the first
7948 anchor so that we don't pessimize the case where we take the address
7949 of a variable at the beginning of the block. This is particularly
7950 useful when a block has only one variable assigned to it.
7951
7952 We try to place anchors RANGE bytes apart, so there can then be
7953 anchors at +/-RANGE, +/-2 * RANGE, and so on, up to the limits of
7954 a ptr_mode offset. With some target settings, the lowest such
7955 anchor might be out of range for the lowest ptr_mode offset;
7956 likewise the highest anchor for the highest offset. Use anchors
7957 at the extreme ends of the ptr_mode range in such cases.
7958
7959 All arithmetic uses unsigned integers in order to avoid
7960 signed overflow. */
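
  /* As a worked example (illustrative numbers only): with
     min_anchor_offset == -32768 and max_anchor_offset == 32767 the range
     is 65536, so a request for byte offset 70000 computes
     delta == 70000 - -32768 == 102768, rounds it down to a multiple of
     the range (65536) and places the anchor at offset 65536, leaving the
     object addressable as anchor + 4464.  */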
7961 max_offset = (unsigned HOST_WIDE_INT) targetm.max_anchor_offset;
7962 min_offset = (unsigned HOST_WIDE_INT) targetm.min_anchor_offset;
7963 range = max_offset - min_offset + 1;
7964 if (range == 0)
7965 offset = 0;
7966 else
7967 {
7968 bias = HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (ptr_mode) - 1);
7969 if (offset < 0)
7970 {
7971 delta = -(unsigned HOST_WIDE_INT) offset + max_offset;
7972 delta -= delta % range;
7973 if (delta > bias)
7974 delta = bias;
7975 offset = (HOST_WIDE_INT) (-delta);
7976 }
7977 else
7978 {
7979 delta = (unsigned HOST_WIDE_INT) offset - min_offset;
7980 delta -= delta % range;
7981 if (delta > bias - 1)
7982 delta = bias - 1;
7983 offset = (HOST_WIDE_INT) delta;
7984 }
7985 }
7986
7987 /* Do a binary search to see if there's already an anchor we can use.
7988 Set BEGIN to the new anchor's index if not. */
7989 begin = 0;
7990 end = vec_safe_length (block->anchors);
7991 while (begin != end)
7992 {
7993 middle = (end + begin) / 2;
7994 anchor = (*block->anchors)[middle];
7995 if (SYMBOL_REF_BLOCK_OFFSET (anchor) > offset)
7996 end = middle;
7997 else if (SYMBOL_REF_BLOCK_OFFSET (anchor) < offset)
7998 begin = middle + 1;
7999 else if (SYMBOL_REF_TLS_MODEL (anchor) > model)
8000 end = middle;
8001 else if (SYMBOL_REF_TLS_MODEL (anchor) < model)
8002 begin = middle + 1;
8003 else
8004 return anchor;
8005 }
8006
8007 /* Create a new anchor with a unique label. */
8008 ASM_GENERATE_INTERNAL_LABEL (label, "LANCHOR", anchor_labelno++);
8009 anchor = create_block_symbol (ggc_strdup (label), block, offset);
8010 SYMBOL_REF_FLAGS (anchor) |= SYMBOL_FLAG_LOCAL | SYMBOL_FLAG_ANCHOR;
8011 SYMBOL_REF_FLAGS (anchor) |= model << SYMBOL_FLAG_TLS_SHIFT;
8012
8013 /* Insert it at index BEGIN. */
8014 vec_safe_insert (block->anchors, begin, anchor);
8015 return anchor;
8016 }
8017
8018 /* Output the objects in BLOCK. */
8019
8020 static void
8021 output_object_block (struct object_block *block)
8022 {
8023 class constant_descriptor_rtx *desc;
8024 unsigned int i;
8025 HOST_WIDE_INT offset;
8026 tree decl;
8027 rtx symbol;
8028
8029 if (!block->objects)
8030 return;
8031
8032 /* Switch to the section and make sure that the first byte is
8033 suitably aligned. */
8034 /* Special case VTV comdat sections similar to assemble_variable. */
8035 if (SECTION_STYLE (block->sect) == SECTION_NAMED
8036 && block->sect->named.name
8037 && (strcmp (block->sect->named.name, ".vtable_map_vars") == 0))
8038 handle_vtv_comdat_section (block->sect, block->sect->named.decl);
8039 else
8040 switch_to_section (block->sect, SYMBOL_REF_DECL ((*block->objects)[0]));
8041
8042 gcc_checking_assert (!(block->sect->common.flags & SECTION_MERGE));
8043 assemble_align (block->alignment);
8044
8045 /* Define the values of all anchors relative to the current section
8046 position. */
8047 FOR_EACH_VEC_SAFE_ELT (block->anchors, i, symbol)
8048 targetm.asm_out.output_anchor (symbol);
8049
8050 /* Output the objects themselves. */
8051 offset = 0;
8052 FOR_EACH_VEC_ELT (*block->objects, i, symbol)
8053 {
8054 /* Move to the object's offset, padding with zeros if necessary. */
8055 assemble_zeros (SYMBOL_REF_BLOCK_OFFSET (symbol) - offset);
8056 offset = SYMBOL_REF_BLOCK_OFFSET (symbol);
8057 if (CONSTANT_POOL_ADDRESS_P (symbol))
8058 {
8059 desc = SYMBOL_REF_CONSTANT (symbol);
8060 /* Pass 1 for align as we have already laid out everything in the block.
8061 So aligning shouldn't be necessary. */
8062 output_constant_pool_1 (desc, 1);
8063 offset += GET_MODE_SIZE (desc->mode);
8064 }
8065 else if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
8066 {
8067 HOST_WIDE_INT size;
8068 decl = SYMBOL_REF_DECL (symbol);
8069 assemble_constant_contents (DECL_INITIAL (decl), XSTR (symbol, 0),
8070 DECL_ALIGN (decl), false);
8071
8072 size = get_constant_size (DECL_INITIAL (decl));
8073 offset += size;
8074 if ((flag_sanitize & SANITIZE_ADDRESS)
8075 && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST
8076 && asan_protect_global (DECL_INITIAL (decl)))
8077 {
8078 size = asan_red_zone_size (size);
8079 assemble_zeros (size);
8080 offset += size;
8081 }
8082 }
8083 else
8084 {
8085 HOST_WIDE_INT size;
8086 decl = SYMBOL_REF_DECL (symbol);
8087 assemble_variable_contents (decl, XSTR (symbol, 0), false, false);
8088 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
8089 offset += size;
8090 if ((flag_sanitize & SANITIZE_ADDRESS)
8091 && asan_protect_global (decl))
8092 {
8093 size = asan_red_zone_size (size);
8094 assemble_zeros (size);
8095 offset += size;
8096 }
8097 }
8098 }
8099 }
8100
8101 /* A callback for qsort to compare object_blocks. */
8102
8103 static int
8104 output_object_block_compare (const void *x, const void *y)
8105 {
8106 object_block *p1 = *(object_block * const*)x;
8107 object_block *p2 = *(object_block * const*)y;
8108
8109 if (p1->sect->common.flags & SECTION_NAMED
8110 && !(p2->sect->common.flags & SECTION_NAMED))
8111 return 1;
8112
8113 if (!(p1->sect->common.flags & SECTION_NAMED)
8114 && p2->sect->common.flags & SECTION_NAMED)
8115 return -1;
8116
8117 if (p1->sect->common.flags & SECTION_NAMED
8118 && p2->sect->common.flags & SECTION_NAMED)
8119 return strcmp (p1->sect->named.name, p2->sect->named.name);
8120
8121 unsigned f1 = p1->sect->common.flags;
8122 unsigned f2 = p2->sect->common.flags;
8123 if (f1 == f2)
8124 return 0;
8125 return f1 < f2 ? -1 : 1;
8126 }
8127
8128 /* Output the definitions of all object_blocks. */
8129
8130 void
8131 output_object_blocks (void)
8132 {
8133 vec<object_block *, va_heap> v;
8134 v.create (object_block_htab->elements ());
8135 object_block *obj;
8136 hash_table<object_block_hasher>::iterator hi;
8137
8138 FOR_EACH_HASH_TABLE_ELEMENT (*object_block_htab, obj, object_block *, hi)
8139 v.quick_push (obj);
8140
8141 /* Sort them in order to output them in a deterministic manner;
8142 otherwise we may get .rodata sections in different orders with
8143 and without -g. */
8144 v.qsort (output_object_block_compare);
8145 unsigned i;
8146 FOR_EACH_VEC_ELT (v, i, obj)
8147 output_object_block (obj);
8148
8149 v.release ();
8150 }
8151
8152 /* This function provides a possible implementation of the
8153 TARGET_ASM_RECORD_GCC_SWITCHES target hook for ELF targets. When triggered
8154 by -frecord-gcc-switches it creates a new mergeable, string section in the
8155 assembler output file called TARGET_ASM_RECORD_GCC_SWITCHES_SECTION which
8156 contains the switches in ASCII format.
8157
8158 FIXME: This code does not correctly handle double quote characters
8159 that appear inside strings (it strips them rather than preserving them).
8160 FIXME: ASM_OUTPUT_ASCII, as defined in config/elfos.h, will not emit NUL
8161 characters - instead it treats them as sub-string separators. Since
8162 we want to emit NUL string terminators into the object file we have to use
8163 ASM_OUTPUT_SKIP. */
8164
8165 void
8166 elf_record_gcc_switches (const char *options)
8167 {
8168 section *sec = get_section (targetm.asm_out.record_gcc_switches_section,
8169 SECTION_DEBUG | SECTION_MERGE
8170 | SECTION_STRINGS | (SECTION_ENTSIZE & 1), NULL);
8171 switch_to_section (sec);
8172 ASM_OUTPUT_ASCII (asm_out_file, options, strlen (options) + 1);
8173 }
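
/* With -frecord-gcc-switches the recorded options can be read back from
   the object file afterwards, e.g.

       readelf -p .GCC.command.line foo.o

   assuming the default section name ".GCC.command.line" for
   targetm.asm_out.record_gcc_switches_section.  */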
8174
8175 /* Emit text to declare externally defined symbols. It is needed to
8176 properly support non-default visibility. */
8177 void
8178 default_elf_asm_output_external (FILE *file ATTRIBUTE_UNUSED,
8179 tree decl,
8180 const char *name ATTRIBUTE_UNUSED)
8181 {
8182 /* We output the name if and only if TREE_SYMBOL_REFERENCED is
8183 set in order to avoid putting out names that are never really
8184 used. Always output visibility specified in the source. */
8185 if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
8186 && (DECL_VISIBILITY_SPECIFIED (decl)
8187 || targetm.binds_local_p (decl)))
8188 maybe_assemble_visibility (decl);
8189 }
8190
8191 /* The default hook for TARGET_ASM_OUTPUT_SOURCE_FILENAME. */
8192
8193 void
8194 default_asm_output_source_filename (FILE *file, const char *name)
8195 {
8196 #ifdef ASM_OUTPUT_SOURCE_FILENAME
8197 ASM_OUTPUT_SOURCE_FILENAME (file, name);
8198 #else
8199 fprintf (file, "\t.file\t");
8200 output_quoted_string (file, name);
8201 putc ('\n', file);
8202 #endif
8203 }
8204
8205 /* Output a file name in the form wanted by System V. */
8206
8207 void
8208 output_file_directive (FILE *asm_file, const char *input_name)
8209 {
8210 int len;
8211 const char *na;
8212
8213 if (input_name == NULL)
8214 input_name = "<stdin>";
8215 else
8216 input_name = remap_debug_filename (input_name);
8217
8218 len = strlen (input_name);
8219 na = input_name + len;
8220
8221 /* NA gets INPUT_NAME sans directory names. */
8222 while (na > input_name)
8223 {
8224 if (IS_DIR_SEPARATOR (na[-1]))
8225 break;
8226 na--;
8227 }
8228
8229 targetm.asm_out.output_source_filename (asm_file, na);
8230 }
8231
8232 /* Create a DEBUG_EXPR_DECL / DEBUG_EXPR pair from RTL expression
8233 EXP. */
8234 rtx
8235 make_debug_expr_from_rtl (const_rtx exp)
8236 {
8237 tree ddecl = make_node (DEBUG_EXPR_DECL), type;
8238 machine_mode mode = GET_MODE (exp);
8239 rtx dval;
8240
8241 DECL_ARTIFICIAL (ddecl) = 1;
8242 if (REG_P (exp) && REG_EXPR (exp))
8243 type = TREE_TYPE (REG_EXPR (exp));
8244 else if (MEM_P (exp) && MEM_EXPR (exp))
8245 type = TREE_TYPE (MEM_EXPR (exp));
8246 else
8247 type = NULL_TREE;
8248 if (type && TYPE_MODE (type) == mode)
8249 TREE_TYPE (ddecl) = type;
8250 else
8251 TREE_TYPE (ddecl) = lang_hooks.types.type_for_mode (mode, 1);
8252 SET_DECL_MODE (ddecl, mode);
8253 dval = gen_rtx_DEBUG_EXPR (mode);
8254 DEBUG_EXPR_TREE_DECL (dval) = ddecl;
8255 SET_DECL_RTL (ddecl, dval);
8256 return dval;
8257 }
8258
8259 #ifdef ELF_ASCII_ESCAPES
8260 /* Default ASM_OUTPUT_LIMITED_STRING for ELF targets. */
8261
8262 void
8263 default_elf_asm_output_limited_string (FILE *f, const char *s)
8264 {
8265 int escape;
8266 unsigned char c;
8267
8268 fputs (STRING_ASM_OP, f);
8269 putc ('"', f);
8270 while (*s != '\0')
8271 {
8272 c = *s;
8273 escape = ELF_ASCII_ESCAPES[c];
8274 switch (escape)
8275 {
8276 case 0:
8277 putc (c, f);
8278 break;
8279 case 1:
8280 putc ('\\', f);
8281 putc ('0'+((c>>6)&7), f);
8282 putc ('0'+((c>>3)&7), f);
8283 putc ('0'+(c&7), f);
8284 break;
8285 default:
8286 putc ('\\', f);
8287 putc (escape, f);
8288 break;
8289 }
8290 s++;
8291 }
8292 putc ('\"', f);
8293 putc ('\n', f);
8294 }
8295
8296 /* Default ASM_OUTPUT_ASCII for ELF targets. */
8297
8298 void
8299 default_elf_asm_output_ascii (FILE *f, const char *s, unsigned int len)
8300 {
8301 const char *limit = s + len;
8302 const char *last_null = NULL;
8303 unsigned bytes_in_chunk = 0;
8304 unsigned char c;
8305 int escape;
8306
8307 for (; s < limit; s++)
8308 {
8309 const char *p;
8310
8311 if (bytes_in_chunk >= 60)
8312 {
8313 putc ('\"', f);
8314 putc ('\n', f);
8315 bytes_in_chunk = 0;
8316 }
8317
8318 if (s > last_null)
8319 {
8320 for (p = s; p < limit && *p != '\0'; p++)
8321 continue;
8322 last_null = p;
8323 }
8324 else
8325 p = last_null;
8326
8327 if (p < limit && (p - s) <= (long) ELF_STRING_LIMIT)
8328 {
8329 if (bytes_in_chunk > 0)
8330 {
8331 putc ('\"', f);
8332 putc ('\n', f);
8333 bytes_in_chunk = 0;
8334 }
8335
8336 default_elf_asm_output_limited_string (f, s);
8337 s = p;
8338 }
8339 else
8340 {
8341 if (bytes_in_chunk == 0)
8342 fputs (ASCII_DATA_ASM_OP "\"", f);
8343
8344 c = *s;
8345 escape = ELF_ASCII_ESCAPES[c];
8346 switch (escape)
8347 {
8348 case 0:
8349 putc (c, f);
8350 bytes_in_chunk++;
8351 break;
8352 case 1:
8353 putc ('\\', f);
8354 putc ('0'+((c>>6)&7), f);
8355 putc ('0'+((c>>3)&7), f);
8356 putc ('0'+(c&7), f);
8357 bytes_in_chunk += 4;
8358 break;
8359 default:
8360 putc ('\\', f);
8361 putc (escape, f);
8362 bytes_in_chunk += 2;
8363 break;
8364 }
8365
8366 }
8367 }
8368
8369 if (bytes_in_chunk > 0)
8370 {
8371 putc ('\"', f);
8372 putc ('\n', f);
8373 }
8374 }
8375 #endif
8376
8377 static GTY(()) section *elf_init_array_section;
8378 static GTY(()) section *elf_fini_array_section;
8379
8380 static section *
8381 get_elf_initfini_array_priority_section (int priority,
8382 bool constructor_p)
8383 {
8384 section *sec;
8385 if (priority != DEFAULT_INIT_PRIORITY)
8386 {
8387 char buf[18];
8388 sprintf (buf, "%s.%.5u",
8389 constructor_p ? ".init_array" : ".fini_array",
8390 priority);
8391 sec = get_section (buf, SECTION_WRITE | SECTION_NOTYPE, NULL_TREE);
8392 }
8393 else
8394 {
8395 if (constructor_p)
8396 {
8397 if (elf_init_array_section == NULL)
8398 elf_init_array_section
8399 = get_section (".init_array",
8400 SECTION_WRITE | SECTION_NOTYPE, NULL_TREE);
8401 sec = elf_init_array_section;
8402 }
8403 else
8404 {
8405 if (elf_fini_array_section == NULL)
8406 elf_fini_array_section
8407 = get_section (".fini_array",
8408 SECTION_WRITE | SECTION_NOTYPE, NULL_TREE);
8409 sec = elf_fini_array_section;
8410 }
8411 }
8412 return sec;
8413 }
8414
8415 /* Use .init_array section for constructors. */
8416
8417 void
8418 default_elf_init_array_asm_out_constructor (rtx symbol, int priority)
8419 {
8420 section *sec = get_elf_initfini_array_priority_section (priority,
8421 true);
8422 assemble_addr_to_section (symbol, sec);
8423 }
8424
8425 /* Use .fini_array section for destructors. */
8426
8427 void
8428 default_elf_fini_array_asm_out_destructor (rtx symbol, int priority)
8429 {
8430 section *sec = get_elf_initfini_array_priority_section (priority,
8431 false);
8432 assemble_addr_to_section (symbol, sec);
8433 }
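
/* For instance, on targets using .init_array/.fini_array, a constructor
   registered with

       __attribute__((constructor (101))) static void init_early (void);

   has its address emitted into ".init_array.00101" (the "%.5u" format
   above), while the default priority uses plain ".init_array"; the same
   scheme applies to ".fini_array" for destructors.  */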
8434
8435 /* Default TARGET_ASM_OUTPUT_IDENT hook.
8436
8437 This is a bit of a cheat. The real default is a no-op, but this
8438 hook is the default for all targets with a .ident directive. */
8439
8440 void
8441 default_asm_output_ident_directive (const char *ident_str)
8442 {
8443 const char *ident_asm_op = "\t.ident\t";
8444
8445 /* If we are still in the front end, do not write out the string
8446 to asm_out_file. Instead, add a fake top-level asm statement.
8447 This allows the front ends to use this hook without actually
8448 writing to asm_out_file, to handle #ident or Pragma Ident. */
8449 if (symtab->state == PARSING)
8450 {
8451 char *buf = ACONCAT ((ident_asm_op, "\"", ident_str, "\"\n", NULL));
8452 symtab->finalize_toplevel_asm (build_string (strlen (buf), buf));
8453 }
8454 else
8455 fprintf (asm_out_file, "%s\"%s\"\n", ident_asm_op, ident_str);
8456 }
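
/* For example, a translation unit containing

       #ident "example string"

   ends up with

       .ident  "example string"

   in the assembler output; during parsing the directive is carried as a
   fake top-level asm statement as described above.  */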
8457
8458
8459 /* This function ensures that vtable_map variables are not only
8460 in the comdat section, but that each variable has its own unique
8461 comdat name. Without this the variables end up in the same section
8462 with a single comdat name.
8463
8464 FIXME: resolve_unique_section needs to deal better with
8465 decls with both DECL_SECTION_NAME and DECL_ONE_ONLY. Once
8466 that is fixed, this if-else statement can be replaced with
8467 a single call to "switch_to_section (sect)". */
8468
8469 static void
8470 handle_vtv_comdat_section (section *sect, const_tree decl ATTRIBUTE_UNUSED)
8471 {
8472 #if defined (OBJECT_FORMAT_ELF)
8473 targetm.asm_out.named_section (sect->named.name,
8474 sect->named.common.flags
8475 | SECTION_LINKONCE,
8476 DECL_NAME (decl));
8477 in_section = sect;
8478 #else
8479 /* Neither OBJECT_FORMAT_PE nor OBJECT_FORMAT_COFF is set here.
8480 Therefore the following check is used.
8481 In case the target is PE or COFF, a comdat group section
8482 is created, e.g. .vtable_map_vars$foo. The linker places
8483 everything in .vtable_map_vars at the end.
8484
8485 A fix could be made in
8486 gcc/config/i386/winnt.c: i386_pe_unique_section. */
8487 if (TARGET_PECOFF)
8488 {
8489 char *name;
8490
8491 if (TREE_CODE (DECL_NAME (decl)) == IDENTIFIER_NODE)
8492 name = ACONCAT ((sect->named.name, "$",
8493 IDENTIFIER_POINTER (DECL_NAME (decl)), NULL));
8494 else
8495 name = ACONCAT ((sect->named.name, "$",
8496 IDENTIFIER_POINTER (DECL_COMDAT_GROUP (DECL_NAME (decl))),
8497 NULL));
8498
8499 targetm.asm_out.named_section (name,
8500 sect->named.common.flags
8501 | SECTION_LINKONCE,
8502 DECL_NAME (decl));
8503 in_section = sect;
8504 }
8505 else
8506 switch_to_section (sect);
8507 #endif
8508 }
8509
8510 #include "gt-varasm.h"