gcc/varasm.c
1 /* Output variables, constants and external declarations, for GNU compiler.
2 Copyright (C) 1987-2021 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* This file handles generation of all the assembler code
22 *except* the instructions of a function.
23 This includes declarations of variables and their initial values.
24
25 We also output the assembler code for constants stored in memory
26 and are responsible for combining constants with the same value. */
27
28 #include "config.h"
29 #include "system.h"
30 #include "coretypes.h"
31 #include "backend.h"
32 #include "target.h"
33 #include "rtl.h"
34 #include "tree.h"
35 #include "predict.h"
36 #include "memmodel.h"
37 #include "tm_p.h"
38 #include "stringpool.h"
39 #include "regs.h"
40 #include "emit-rtl.h"
41 #include "cgraph.h"
42 #include "diagnostic-core.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "varasm.h"
46 #include "version.h"
47 #include "flags.h"
48 #include "stmt.h"
49 #include "expr.h"
50 #include "expmed.h"
51 #include "optabs.h"
52 #include "output.h"
53 #include "langhooks.h"
54 #include "debug.h"
55 #include "common/common-target.h"
56 #include "stringpool.h"
57 #include "attribs.h"
58 #include "asan.h"
59 #include "rtl-iter.h"
60 #include "file-prefix-map.h" /* remap_debug_filename() */
61 #include "alloc-pool.h"
62 #include "toplev.h"
63 #include "opts.h"
64
65 #ifdef XCOFF_DEBUGGING_INFO
66 #include "xcoffout.h" /* Needed for external data declarations. */
67 #endif
68
69 /* The (assembler) name of the first globally-visible object output. */
70 extern GTY(()) const char *first_global_object_name;
71 extern GTY(()) const char *weak_global_object_name;
72
73 const char *first_global_object_name;
74 const char *weak_global_object_name;
75
76 class addr_const;
77 class constant_descriptor_rtx;
78 struct rtx_constant_pool;
79
80 #define n_deferred_constants (crtl->varasm.deferred_constants)
81
82 /* Number for making the label on the next
83 constant that is stored in memory. */
84
85 static GTY(()) int const_labelno;
86
87 /* Carry information from ASM_DECLARE_OBJECT_NAME
88 to ASM_FINISH_DECLARE_OBJECT. */
89
90 int size_directive_output;
91
92 /* The last decl for which assemble_variable was called,
93 if it did ASM_DECLARE_OBJECT_NAME.
94 If the last call to assemble_variable didn't do that,
95 this holds 0. */
96
97 tree last_assemble_variable_decl;
98
99 /* The following global variable indicates if the first basic block
100 in a function belongs to the cold partition or not. */
101
102 bool first_function_block_is_cold;
103
104 /* Whether we saw any functions with no_split_stack. */
105
106 static bool saw_no_split_stack;
107
108 static const char *strip_reg_name (const char *);
109 static int contains_pointers_p (tree);
110 #ifdef ASM_OUTPUT_EXTERNAL
111 static bool incorporeal_function_p (tree);
112 #endif
113 static void decode_addr_const (tree, class addr_const *);
114 static hashval_t const_hash_1 (const tree);
115 static int compare_constant (const tree, const tree);
116 static void output_constant_def_contents (rtx);
117 static void output_addressed_constants (tree, int);
118 static unsigned HOST_WIDE_INT output_constant (tree, unsigned HOST_WIDE_INT,
119 unsigned int, bool, bool);
120 static void globalize_decl (tree);
121 static bool decl_readonly_section_1 (enum section_category);
122 #ifdef BSS_SECTION_ASM_OP
123 #ifdef ASM_OUTPUT_ALIGNED_BSS
124 static void asm_output_aligned_bss (FILE *, tree, const char *,
125 unsigned HOST_WIDE_INT, int)
126 ATTRIBUTE_UNUSED;
127 #endif
128 #endif /* BSS_SECTION_ASM_OP */
129 static void mark_weak (tree);
130 static void output_constant_pool (const char *, tree);
131 static void handle_vtv_comdat_section (section *, const_tree);
132 \f
133 /* Well-known sections, each one associated with some sort of *_ASM_OP. */
134 section *text_section;
135 section *data_section;
136 section *readonly_data_section;
137 section *sdata_section;
138 section *ctors_section;
139 section *dtors_section;
140 section *bss_section;
141 section *sbss_section;
142
143 /* Various forms of common section. All are guaranteed to be nonnull. */
144 section *tls_comm_section;
145 section *comm_section;
146 section *lcomm_section;
147
148 /* A SECTION_NOSWITCH section used for declaring global BSS variables.
149 May be null. */
150 section *bss_noswitch_section;
151
152 /* The section that holds the main exception table, when known. The section
153 is set either by the target's init_sections hook or by the first call to
154 switch_to_exception_section. */
155 section *exception_section;
156
157 /* The section that holds the DWARF2 frame unwind information, when known.
158 The section is set either by the target's init_sections hook or by the
159 first call to switch_to_eh_frame_section. */
160 section *eh_frame_section;
161
162 /* asm_out_file's current section. This is NULL if no section has yet
163 been selected or if we lose track of what the current section is. */
164 section *in_section;
165
166 /* True if code for the current function is currently being directed
167 at the cold section. */
168 bool in_cold_section_p;
169
170 /* The following global holds the "function name" for the code in the
171 cold section of a function, if hot/cold function splitting is enabled
172 and there was actually code that went into the cold section. A
173 pseudo function name is needed for the cold section of code for some
174 debugging tools that perform symbolization. */
175 tree cold_function_name = NULL_TREE;
176
177 /* A linked list of all the unnamed sections. */
178 static GTY(()) section *unnamed_sections;
179
180 /* Return a nonzero value if DECL has a section attribute. */
181 #define IN_NAMED_SECTION(DECL) \
182 (VAR_OR_FUNCTION_DECL_P (DECL) && DECL_SECTION_NAME (DECL) != NULL)
183
184 struct section_hasher : ggc_ptr_hash<section>
185 {
186 typedef const char *compare_type;
187
188 static hashval_t hash (section *);
189 static bool equal (section *, const char *);
190 };
191
192 /* Hash table of named sections. */
193 static GTY(()) hash_table<section_hasher> *section_htab;
194
195 struct object_block_hasher : ggc_ptr_hash<object_block>
196 {
197 typedef const section *compare_type;
198
199 static hashval_t hash (object_block *);
200 static bool equal (object_block *, const section *);
201 };
202
203 /* A table of object_blocks, indexed by section. */
204 static GTY(()) hash_table<object_block_hasher> *object_block_htab;
205
206 /* The next number to use for internal anchor labels. */
207 static GTY(()) int anchor_labelno;
208
209 /* A pool of constants that can be shared between functions. */
210 static GTY(()) struct rtx_constant_pool *shared_constant_pool;
211
212 /* Helper routines for maintaining section_htab. */
213
214 bool
215 section_hasher::equal (section *old, const char *new_name)
216 {
217 return strcmp (old->named.name, new_name) == 0;
218 }
219
220 hashval_t
221 section_hasher::hash (section *old)
222 {
223 return htab_hash_string (old->named.name);
224 }
225
226 /* Return a hash value for section SECT. */
227
228 static hashval_t
229 hash_section (section *sect)
230 {
231 if (sect->common.flags & SECTION_NAMED)
232 return htab_hash_string (sect->named.name);
233 return sect->common.flags & ~SECTION_DECLARED;
234 }
235
236 /* Helper routines for maintaining object_block_htab. */
237
238 inline bool
239 object_block_hasher::equal (object_block *old, const section *new_section)
240 {
241 return old->sect == new_section;
242 }
243
244 hashval_t
245 object_block_hasher::hash (object_block *old)
246 {
247 return hash_section (old->sect);
248 }
249
250 /* Return a new unnamed section with the given fields. */
251
252 section *
253 get_unnamed_section (unsigned int flags, void (*callback) (const void *),
254 const void *data)
255 {
256 section *sect;
257
258 sect = ggc_alloc<section> ();
259 sect->unnamed.common.flags = flags | SECTION_UNNAMED;
260 sect->unnamed.callback = callback;
261 sect->unnamed.data = data;
262 sect->unnamed.next = unnamed_sections;
263
264 unnamed_sections = sect;
265 return sect;
266 }
267
268 /* Return a SECTION_NOSWITCH section with the given fields. */
269
270 static section *
271 get_noswitch_section (unsigned int flags, noswitch_section_callback callback)
272 {
273 section *sect;
274
275 sect = ggc_alloc<section> ();
276 sect->noswitch.common.flags = flags | SECTION_NOSWITCH;
277 sect->noswitch.callback = callback;
278
279 return sect;
280 }
281
282 /* Return the named section structure associated with NAME. Create
283 a new section with the given fields if no such structure exists.
284 When NOT_EXISTING is true, fail if the section already exists.  Return
285 the existing section if the SECTION_RETAIN bit doesn't match.  Set the
286 SECTION_WRITE | SECTION_RELRO bits on the existing section if one of
287 the two flag sets (existing vs. requested) is SECTION_WRITE | SECTION_RELRO,
288 the other has neither flag, the section is named, and it either hasn't
289 been declared yet or has been declared as writable. */
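/* Illustrative example (hypothetical section name, not taken from this
   file): if ".my.relro" was first created read-only, i.e. with neither
   SECTION_WRITE nor SECTION_RELRO, and a later request passes
   SECTION_WRITE | SECTION_RELRO while the remaining flags agree and the
   section has not yet been declared, the existing entry is reused and its
   flags are upgraded to SECTION_WRITE | SECTION_RELRO, i.e. writable only
   because of relocations.  */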
290
291 section *
292 get_section (const char *name, unsigned int flags, tree decl,
293 bool not_existing)
294 {
295 section *sect, **slot;
296
297 slot = section_htab->find_slot_with_hash (name, htab_hash_string (name),
298 INSERT);
299 flags |= SECTION_NAMED;
300 if (decl != nullptr
301 && DECL_P (decl)
302 && lookup_attribute ("retain", DECL_ATTRIBUTES (decl)))
303 flags |= SECTION_RETAIN;
304 if (*slot == NULL)
305 {
306 sect = ggc_alloc<section> ();
307 sect->named.common.flags = flags;
308 sect->named.name = ggc_strdup (name);
309 sect->named.decl = decl;
310 *slot = sect;
311 }
312 else
313 {
314 if (not_existing)
315 internal_error ("Section already exists: %qs", name);
316
317 sect = *slot;
318 /* It is fine if one of the sections has SECTION_NOTYPE as long as
319 the other has none of the contrary flags (see the logic at the end
320 of default_section_type_flags, below). */
321 if (((sect->common.flags ^ flags) & SECTION_NOTYPE)
322 && !((sect->common.flags | flags)
323 & (SECTION_CODE | SECTION_BSS | SECTION_TLS | SECTION_ENTSIZE
324 | (HAVE_COMDAT_GROUP ? SECTION_LINKONCE : 0))))
325 {
326 sect->common.flags |= SECTION_NOTYPE;
327 flags |= SECTION_NOTYPE;
328 }
329 if ((sect->common.flags & ~SECTION_DECLARED) != flags
330 && ((sect->common.flags | flags) & SECTION_OVERRIDE) == 0)
331 {
332 /* It is fine if one of the section flags is
333 SECTION_WRITE | SECTION_RELRO and the other has none of these
334 flags (i.e. read-only) in named sections and either the
335 section hasn't been declared yet or has been declared as writable.
336 In that case just make sure the resulting flags are
337 SECTION_WRITE | SECTION_RELRO, ie. writable only because of
338 relocations. */
339 if (((sect->common.flags ^ flags) & (SECTION_WRITE | SECTION_RELRO))
340 == (SECTION_WRITE | SECTION_RELRO)
341 && (sect->common.flags
342 & ~(SECTION_DECLARED | SECTION_WRITE | SECTION_RELRO))
343 == (flags & ~(SECTION_WRITE | SECTION_RELRO))
344 && ((sect->common.flags & SECTION_DECLARED) == 0
345 || (sect->common.flags & SECTION_WRITE)))
346 {
347 sect->common.flags |= (SECTION_WRITE | SECTION_RELRO);
348 return sect;
349 }
350 /* If the SECTION_RETAIN bit doesn't match, return and switch
351 to a new section later. */
352 if ((sect->common.flags & SECTION_RETAIN)
353 != (flags & SECTION_RETAIN))
354 return sect;
355 /* Sanity check user variables for flag changes. */
356 if (sect->named.decl != NULL
357 && DECL_P (sect->named.decl)
358 && decl != sect->named.decl)
359 {
360 if (decl != NULL && DECL_P (decl))
361 error ("%+qD causes a section type conflict with %qD",
362 decl, sect->named.decl);
363 else
364 error ("section type conflict with %qD", sect->named.decl);
365 inform (DECL_SOURCE_LOCATION (sect->named.decl),
366 "%qD was declared here", sect->named.decl);
367 }
368 else if (decl != NULL && DECL_P (decl))
369 error ("%+qD causes a section type conflict", decl);
370 else
371 error ("section type conflict");
372 /* Make sure we don't error about one section multiple times. */
373 sect->common.flags |= SECTION_OVERRIDE;
374 }
375 }
376 return sect;
377 }
378
379 /* Return true if the current compilation mode benefits from having
380 objects grouped into blocks. */
381
382 static bool
383 use_object_blocks_p (void)
384 {
385 return flag_section_anchors;
386 }
387
388 /* Return the object_block structure for section SECT. Create a new
389 structure if we haven't created one already. Return null if SECT
390 itself is null. Also return null for mergeable sections since
391 section anchors can't be used in mergeable sections anyway,
392 because the linker might move objects around, and using the
393 object blocks infrastructure in that case is both a waste and a
394 maintenance burden. */
395
396 static struct object_block *
397 get_block_for_section (section *sect)
398 {
399 struct object_block *block;
400
401 if (sect == NULL)
402 return NULL;
403
404 if (sect->common.flags & SECTION_MERGE)
405 return NULL;
406
407 object_block **slot
408 = object_block_htab->find_slot_with_hash (sect, hash_section (sect),
409 INSERT);
410 block = *slot;
411 if (block == NULL)
412 {
413 block = ggc_cleared_alloc<object_block> ();
414 block->sect = sect;
415 *slot = block;
416 }
417 return block;
418 }
419
420 /* Create a symbol with label LABEL and place it at byte offset
421 OFFSET in BLOCK. OFFSET can be negative if the symbol's offset
422 is not yet known. LABEL must be a garbage-collected string. */
423
424 static rtx
425 create_block_symbol (const char *label, struct object_block *block,
426 HOST_WIDE_INT offset)
427 {
428 rtx symbol;
429 unsigned int size;
430
431 /* Create the extended SYMBOL_REF. */
432 size = RTX_HDR_SIZE + sizeof (struct block_symbol);
433 symbol = (rtx) ggc_internal_alloc (size);
434
435 /* Initialize the normal SYMBOL_REF fields. */
436 memset (symbol, 0, size);
437 PUT_CODE (symbol, SYMBOL_REF);
438 PUT_MODE (symbol, Pmode);
439 XSTR (symbol, 0) = label;
440 SYMBOL_REF_FLAGS (symbol) = SYMBOL_FLAG_HAS_BLOCK_INFO;
441
442 /* Initialize the block_symbol stuff. */
443 SYMBOL_REF_BLOCK (symbol) = block;
444 SYMBOL_REF_BLOCK_OFFSET (symbol) = offset;
445
446 return symbol;
447 }
448
449 /* Return a section with a particular name and with whatever SECTION_*
450 flags section_type_flags deems appropriate. The name of the section
451 is taken from NAME if nonnull, otherwise it is taken from DECL's
452 DECL_SECTION_NAME. DECL is the decl associated with the section
453 (see the section comment for details) and RELOC is as for
454 section_type_flags. */
455
456 section *
457 get_named_section (tree decl, const char *name, int reloc)
458 {
459 unsigned int flags;
460
461 if (name == NULL)
462 {
463 gcc_assert (decl && DECL_P (decl) && DECL_SECTION_NAME (decl));
464 name = DECL_SECTION_NAME (decl);
465 }
466
467 flags = targetm.section_type_flags (decl, name, reloc);
468 return get_section (name, flags, decl);
469 }
470
471 /* Worker for resolve_unique_section. */
472
473 static bool
474 set_implicit_section (struct symtab_node *n, void *data ATTRIBUTE_UNUSED)
475 {
476 n->implicit_section = true;
477 return false;
478 }
479
480 /* If required, set DECL_SECTION_NAME to a unique name. */
481
482 void
483 resolve_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED,
484 int flag_function_or_data_sections)
485 {
486 if (DECL_SECTION_NAME (decl) == NULL
487 && targetm_common.have_named_sections
488 && (flag_function_or_data_sections
489 || lookup_attribute ("retain", DECL_ATTRIBUTES (decl))
490 || DECL_COMDAT_GROUP (decl)))
491 {
492 targetm.asm_out.unique_section (decl, reloc);
493 if (DECL_SECTION_NAME (decl))
494 symtab_node::get (decl)->call_for_symbol_and_aliases
495 (set_implicit_section, NULL, true);
496 }
497 }
498
499 #ifdef BSS_SECTION_ASM_OP
500
501 #ifdef ASM_OUTPUT_ALIGNED_BSS
502
503 /* Utility function for targets to use in implementing
504 ASM_OUTPUT_ALIGNED_BSS.
505 ??? It is believed that this function will work in most cases so such
506 support is localized here. */
507
508 static void
509 asm_output_aligned_bss (FILE *file, tree decl ATTRIBUTE_UNUSED,
510 const char *name, unsigned HOST_WIDE_INT size,
511 int align)
512 {
513 switch_to_section (bss_section);
514 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
515 #ifdef ASM_DECLARE_OBJECT_NAME
516 last_assemble_variable_decl = decl;
517 ASM_DECLARE_OBJECT_NAME (file, name, decl);
518 #else
519 /* Standard thing is just output label for the object. */
520 ASM_OUTPUT_LABEL (file, name);
521 #endif /* ASM_DECLARE_OBJECT_NAME */
522 ASM_OUTPUT_SKIP (file, size ? size : 1);
523 }
524
525 #endif
526
527 #endif /* BSS_SECTION_ASM_OP */
528
529 #ifndef USE_SELECT_SECTION_FOR_FUNCTIONS
530 /* Return the hot section for function DECL. Return text_section for
531 null DECLs. */
532
533 static section *
534 hot_function_section (tree decl)
535 {
536 if (decl != NULL_TREE
537 && DECL_SECTION_NAME (decl) != NULL
538 && targetm_common.have_named_sections)
539 return get_named_section (decl, NULL, 0);
540 else
541 return text_section;
542 }
543 #endif
544
545 /* Return the section named TEXT_SECTION_NAME if DECL is NULL or if
546 DECL_SECTION_NAME (DECL) is NULL.
547
548 When DECL_SECTION_NAME is non-NULL and NAMED_SECTION_SUFFIX is
549 non-NULL, produce a section whose name is DECL_SECTION_NAME
550 concatenated with NAMED_SECTION_SUFFIX. Otherwise, for a decl in an
551 implicit section, produce "TEXT_SECTION_NAME.IMPLICIT_NAME". */
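/* For example (an illustrative function name, not from this file): with
   TEXT_SECTION_NAME ".text.unlikely", a function foo that lives in an
   implicit section is placed in ".text.unlikely.foo", while a decl without
   a DECL_SECTION_NAME simply gets the section ".text.unlikely".  */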
552
553 section *
554 get_named_text_section (tree decl,
555 const char *text_section_name,
556 const char *named_section_suffix)
557 {
558 if (decl && DECL_SECTION_NAME (decl))
559 {
560 if (named_section_suffix)
561 {
562 const char *dsn = DECL_SECTION_NAME (decl);
563 const char *stripped_name;
564 char *name, *buffer;
565
566 name = (char *) alloca (strlen (dsn) + 1);
567 memcpy (name, dsn,
568 strlen (dsn) + 1);
569
570 stripped_name = targetm.strip_name_encoding (name);
571
572 buffer = ACONCAT ((stripped_name, named_section_suffix, NULL));
573 return get_named_section (decl, buffer, 0);
574 }
575 else if (symtab_node::get (decl)->implicit_section)
576 {
577 const char *name;
578
579 /* Do not try to split gnu_linkonce functions. This gets somewhat
580 slippery. */
581 if (DECL_COMDAT_GROUP (decl) && !HAVE_COMDAT_GROUP)
582 return NULL;
583 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
584 name = targetm.strip_name_encoding (name);
585 return get_named_section (decl, ACONCAT ((text_section_name, ".",
586 name, NULL)), 0);
587 }
588 else
589 return NULL;
590 }
591 return get_named_section (decl, text_section_name, 0);
592 }
593
594 /* Choose named function section based on its frequency. */
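/* In summary (of the cases handled below): startup-only code normally goes
   to ".text.startup", exit-only code to ".text.exit", cold functions to
   ".text.unlikely" and hot ones to ".text.hot"; in all other cases NULL is
   returned and the caller falls back to the default text section.  */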
595
596 section *
597 default_function_section (tree decl, enum node_frequency freq,
598 bool startup, bool exit)
599 {
600 #if defined HAVE_LD_EH_GC_SECTIONS && defined HAVE_LD_EH_GC_SECTIONS_BUG
601 /* Old GNU linkers have buggy --gc-section support, which sometimes
602 results in .gcc_except_table* sections being garbage collected. */
603 if (decl
604 && symtab_node::get (decl)->implicit_section)
605 return NULL;
606 #endif
607
608 if (!flag_reorder_functions
609 || !targetm_common.have_named_sections)
610 return NULL;
611 /* Startup code should go to the startup subsection unless it is
612 unlikely executed (this happens especially with function splitting,
613 where we can split away unnecessary parts of static constructors). */
614 if (startup && freq != NODE_FREQUENCY_UNLIKELY_EXECUTED)
615 {
616 /* During LTO the tp_first_run profiling will naturally place all
617 initialization code first. Using a separate section is counter-productive
618 because startup-only code may call functions which are no longer
619 startup-only. */
620 if (!in_lto_p
621 || !cgraph_node::get (decl)->tp_first_run
622 || !opt_for_fn (decl, flag_profile_reorder_functions))
623 return get_named_text_section (decl, ".text.startup", NULL);
624 else
625 return NULL;
626 }
627
628 /* Similarly for exit. */
629 if (exit && freq != NODE_FREQUENCY_UNLIKELY_EXECUTED)
630 return get_named_text_section (decl, ".text.exit", NULL);
631
632 /* Group cold functions together; do the same for hot code. */
633 switch (freq)
634 {
635 case NODE_FREQUENCY_UNLIKELY_EXECUTED:
636 return get_named_text_section (decl, ".text.unlikely", NULL);
637 case NODE_FREQUENCY_HOT:
638 return get_named_text_section (decl, ".text.hot", NULL);
639 /* FALLTHRU */
640 default:
641 return NULL;
642 }
643 }
644
645 /* Return the section for function DECL.
646
647 If DECL is NULL_TREE, return the text section. We can be passed
648 NULL_TREE under some circumstances by dbxout.c at least.
649
650 If FORCE_COLD is true, return cold function section ignoring
651 the frequency info of cgraph_node. */
652
653 static section *
654 function_section_1 (tree decl, bool force_cold)
655 {
656 section *section = NULL;
657 enum node_frequency freq = NODE_FREQUENCY_NORMAL;
658 bool startup = false, exit = false;
659
660 if (decl)
661 {
662 struct cgraph_node *node = cgraph_node::get (decl);
663
664 if (node)
665 {
666 freq = node->frequency;
667 startup = node->only_called_at_startup;
668 exit = node->only_called_at_exit;
669 }
670 }
671 if (force_cold)
672 freq = NODE_FREQUENCY_UNLIKELY_EXECUTED;
673
674 #ifdef USE_SELECT_SECTION_FOR_FUNCTIONS
675 if (decl != NULL_TREE
676 && DECL_SECTION_NAME (decl) != NULL)
677 {
678 if (targetm.asm_out.function_section)
679 section = targetm.asm_out.function_section (decl, freq,
680 startup, exit);
681 if (section)
682 return section;
683 return get_named_section (decl, NULL, 0);
684 }
685 else
686 return targetm.asm_out.select_section
687 (decl, freq == NODE_FREQUENCY_UNLIKELY_EXECUTED,
688 symtab_node::get (decl)->definition_alignment ());
689 #else
690 if (targetm.asm_out.function_section)
691 section = targetm.asm_out.function_section (decl, freq, startup, exit);
692 if (section)
693 return section;
694 return hot_function_section (decl);
695 #endif
696 }
697
698 /* Return the section for function DECL.
699
700 If DECL is NULL_TREE, return the text section. We can be passed
701 NULL_TREE under some circumstances by dbxout.c at least. */
702
703 section *
704 function_section (tree decl)
705 {
706 /* Handle cases where function splitting code decides
707 to put function entry point into unlikely executed section
708 despite the fact that the function itself is not cold
709 (i.e. it is called rarely but contains a hot loop that is
710 better to live in hot subsection for the code locality). */
711 return function_section_1 (decl,
712 first_function_block_is_cold);
713 }
714
715 /* Return the section for the current function, taking IN_COLD_SECTION_P
716 into account. */
717
718 section *
719 current_function_section (void)
720 {
721 return function_section_1 (current_function_decl, in_cold_section_p);
722 }
723
724 /* Tell assembler to switch to unlikely-to-be-executed text section. */
725
726 section *
727 unlikely_text_section (void)
728 {
729 return function_section_1 (current_function_decl, true);
730 }
731
732 /* When called within a function context, return true if the function
733 has been assigned a cold text section and if SECT is that section.
734 When called outside a function context, return true if SECT is the
735 default cold section. */
736
737 bool
738 unlikely_text_section_p (section *sect)
739 {
740 return sect == function_section_1 (current_function_decl, true);
741 }
742
743 /* Switch to the other function partition (if inside of hot section
744 into cold section, otherwise into the hot section). */
745
746 void
747 switch_to_other_text_partition (void)
748 {
749 in_cold_section_p = !in_cold_section_p;
750 switch_to_section (current_function_section ());
751 }
752
753 /* Return the read-only or relocated read-only data section
754 associated with function DECL. */
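/* For example (illustrative names): with -ffunction-sections and
   -fdata-sections, a function placed in ".text.foo" gets ".rodata.foo" as
   its read-only data section, or ".data.rel.ro.local.foo" when RELOCATABLE
   is true; without a per-function section the result is the shared
   readonly_data_section or ".data.rel.ro.local".  */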
755
756 section *
757 default_function_rodata_section (tree decl, bool relocatable)
758 {
759 const char* sname;
760 unsigned int flags;
761
762 flags = 0;
763
764 if (relocatable)
765 {
766 sname = ".data.rel.ro.local";
767 flags = (SECTION_WRITE | SECTION_RELRO);
768 }
769 else
770 sname = ".rodata";
771
772 if (decl && DECL_SECTION_NAME (decl))
773 {
774 const char *name = DECL_SECTION_NAME (decl);
775
776 if (DECL_COMDAT_GROUP (decl) && HAVE_COMDAT_GROUP)
777 {
778 const char *dot;
779 size_t len;
780 char* rname;
781
782 dot = strchr (name + 1, '.');
783 if (!dot)
784 dot = name;
785 len = strlen (dot) + strlen (sname) + 1;
786 rname = (char *) alloca (len);
787
788 strcpy (rname, sname);
789 strcat (rname, dot);
790 return get_section (rname, (SECTION_LINKONCE | flags), decl);
791 }
792 /* For .gnu.linkonce.t.foo we want to use .gnu.linkonce.r.foo or
793 .gnu.linkonce.d.rel.ro.local.foo if the jump table is relocatable. */
794 else if (DECL_COMDAT_GROUP (decl)
795 && startswith (name, ".gnu.linkonce.t."))
796 {
797 size_t len;
798 char *rname;
799
800 if (relocatable)
801 {
802 len = strlen (name) + strlen (".rel.ro.local") + 1;
803 rname = (char *) alloca (len);
804
805 strcpy (rname, ".gnu.linkonce.d.rel.ro.local");
806 strcat (rname, name + 15);
807 }
808 else
809 {
810 len = strlen (name) + 1;
811 rname = (char *) alloca (len);
812
813 memcpy (rname, name, len);
814 rname[14] = 'r';
815 }
816 return get_section (rname, (SECTION_LINKONCE | flags), decl);
817 }
818 /* For .text.foo we want to use .rodata.foo. */
819 else if (flag_function_sections && flag_data_sections
820 && startswith (name, ".text."))
821 {
822 size_t len = strlen (name) + 1;
823 char *rname = (char *) alloca (len + strlen (sname) - 5);
824
825 memcpy (rname, sname, strlen (sname));
826 memcpy (rname + strlen (sname), name + 5, len - 5);
827 return get_section (rname, flags, decl);
828 }
829 }
830
831 if (relocatable)
832 return get_section (sname, flags, decl);
833 else
834 return readonly_data_section;
835 }
836
837 /* Return the read-only data section associated with function DECL
838 for targets where that section should be always the single
839 readonly data section. */
840
841 section *
842 default_no_function_rodata_section (tree, bool)
843 {
844 return readonly_data_section;
845 }
846
847 /* A subroutine of mergeable_string_section and mergeable_constant_section. */
848
849 static const char *
850 function_mergeable_rodata_prefix (void)
851 {
852 section *s = targetm.asm_out.function_rodata_section (current_function_decl,
853 false);
854 if (SECTION_STYLE (s) == SECTION_NAMED)
855 return s->named.name;
856 else
857 return targetm.asm_out.mergeable_rodata_prefix;
858 }
859
860 /* Return the section to use for string merging. */
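/* For example (assuming the usual ELF ".rodata" prefix): a NUL-terminated
   narrow string that only needs byte alignment is placed in ".rodata.str1.1"
   with an entity size of 1, while a 4-byte-aligned wide string whose element
   mode is 32 bits wide goes to ".rodata.str4.4".  */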
861
862 static section *
863 mergeable_string_section (tree decl ATTRIBUTE_UNUSED,
864 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED,
865 unsigned int flags ATTRIBUTE_UNUSED)
866 {
867 HOST_WIDE_INT len;
868
869 if (HAVE_GAS_SHF_MERGE && flag_merge_constants
870 && TREE_CODE (decl) == STRING_CST
871 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
872 && align <= 256
873 && (len = int_size_in_bytes (TREE_TYPE (decl))) > 0
874 && TREE_STRING_LENGTH (decl) == len)
875 {
876 scalar_int_mode mode;
877 unsigned int modesize;
878 const char *str;
879 HOST_WIDE_INT i;
880 int j, unit;
881 const char *prefix = function_mergeable_rodata_prefix ();
882 char *name = (char *) alloca (strlen (prefix) + 30);
883
884 mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (TREE_TYPE (decl)));
885 modesize = GET_MODE_BITSIZE (mode);
886 if (modesize >= 8 && modesize <= 256
887 && (modesize & (modesize - 1)) == 0)
888 {
889 if (align < modesize)
890 align = modesize;
891
892 if (!HAVE_LD_ALIGNED_SHF_MERGE && align > 8)
893 return readonly_data_section;
894
895 str = TREE_STRING_POINTER (decl);
896 unit = GET_MODE_SIZE (mode);
897
898 /* Check for embedded NUL characters. */
899 for (i = 0; i < len; i += unit)
900 {
901 for (j = 0; j < unit; j++)
902 if (str[i + j] != '\0')
903 break;
904 if (j == unit)
905 break;
906 }
907 if (i == len - unit || (unit == 1 && i == len))
908 {
909 sprintf (name, "%s.str%d.%d", prefix,
910 modesize / 8, (int) (align / 8));
911 flags |= (modesize / 8) | SECTION_MERGE | SECTION_STRINGS;
912 return get_section (name, flags, NULL);
913 }
914 }
915 }
916
917 return readonly_data_section;
918 }
919
920 /* Return the section to use for constant merging. */
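/* For example (again assuming a ".rodata" prefix, and an assembler and
   linker that support aligned SHF_MERGE sections): an 8-byte constant with
   64-bit alignment is emitted into ".rodata.cst8" with SECTION_MERGE and an
   entity size of 8, letting the linker share identical constants.  */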
921
922 section *
923 mergeable_constant_section (machine_mode mode ATTRIBUTE_UNUSED,
924 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED,
925 unsigned int flags ATTRIBUTE_UNUSED)
926 {
927 if (HAVE_GAS_SHF_MERGE && flag_merge_constants
928 && mode != VOIDmode
929 && mode != BLKmode
930 && known_le (GET_MODE_BITSIZE (mode), align)
931 && align >= 8
932 && align <= 256
933 && (align & (align - 1)) == 0
934 && (HAVE_LD_ALIGNED_SHF_MERGE ? 1 : align == 8))
935 {
936 const char *prefix = function_mergeable_rodata_prefix ();
937 char *name = (char *) alloca (strlen (prefix) + 30);
938
939 sprintf (name, "%s.cst%d", prefix, (int) (align / 8));
940 flags |= (align / 8) | SECTION_MERGE;
941 return get_section (name, flags, NULL);
942 }
943 return readonly_data_section;
944 }
945 \f
946 /* Given NAME, a putative register name, discard any customary prefixes. */
947
948 static const char *
949 strip_reg_name (const char *name)
950 {
951 #ifdef REGISTER_PREFIX
952 if (!strncmp (name, REGISTER_PREFIX, strlen (REGISTER_PREFIX)))
953 name += strlen (REGISTER_PREFIX);
954 #endif
955 if (name[0] == '%' || name[0] == '#')
956 name++;
957 return name;
958 }
959 \f
960 /* The user has asked for a DECL to have a particular name. Set (or
961 change) it in such a way that we don't prefix an underscore to
962 it. */
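/* For example (a hypothetical declaration): for `int counter asm ("bar");'
   this stores the assembler name "*bar"; the leading '*' tells
   ASM_OUTPUT_LABELREF to emit the name verbatim, without the target's
   user-label prefix.  */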
963 void
964 set_user_assembler_name (tree decl, const char *name)
965 {
966 char *starred = (char *) alloca (strlen (name) + 2);
967 starred[0] = '*';
968 strcpy (starred + 1, name);
969 symtab->change_decl_assembler_name (decl, get_identifier (starred));
970 SET_DECL_RTL (decl, NULL_RTX);
971 }
972 \f
973 /* Decode an `asm' spec for a declaration as a register name.
974 Return the register number, or -1 if nothing specified,
975 or -2 if the ASMSPEC is not `cc' or `memory' and is not recognized,
976 or -3 if ASMSPEC is `cc' and is not recognized,
977 or -4 if ASMSPEC is `memory' and is not recognized.
978 Accept an exact spelling or a decimal number.
979 Prefixes such as % are optional. */
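/* For example: a NULL ASMSPEC yields -1; "memory" yields -4 and "cc" yields
   -3 (assuming neither spelling names a real register on the target); a
   spelling from reg_names or a plain decimal such as "0" yields that hard
   register number with *PNREGS == 1, provided the register has a name;
   anything else yields -2.  */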
980
981 int
982 decode_reg_name_and_count (const char *asmspec, int *pnregs)
983 {
984 /* Presume just one register is clobbered. */
985 *pnregs = 1;
986
987 if (asmspec != 0)
988 {
989 int i;
990
991 /* Get rid of confusing prefixes. */
992 asmspec = strip_reg_name (asmspec);
993
994 /* Allow a decimal number as a "register name". */
995 for (i = strlen (asmspec) - 1; i >= 0; i--)
996 if (! ISDIGIT (asmspec[i]))
997 break;
998 if (asmspec[0] != 0 && i < 0)
999 {
1000 i = atoi (asmspec);
1001 if (i < FIRST_PSEUDO_REGISTER && i >= 0 && reg_names[i][0])
1002 return i;
1003 else
1004 return -2;
1005 }
1006
1007 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1008 if (reg_names[i][0]
1009 && ! strcmp (asmspec, strip_reg_name (reg_names[i])))
1010 return i;
1011
1012 #ifdef OVERLAPPING_REGISTER_NAMES
1013 {
1014 static const struct
1015 {
1016 const char *const name;
1017 const int number;
1018 const int nregs;
1019 } table[] = OVERLAPPING_REGISTER_NAMES;
1020
1021 for (i = 0; i < (int) ARRAY_SIZE (table); i++)
1022 if (table[i].name[0]
1023 && ! strcmp (asmspec, table[i].name))
1024 {
1025 *pnregs = table[i].nregs;
1026 return table[i].number;
1027 }
1028 }
1029 #endif /* OVERLAPPING_REGISTER_NAMES */
1030
1031 #ifdef ADDITIONAL_REGISTER_NAMES
1032 {
1033 static const struct { const char *const name; const int number; } table[]
1034 = ADDITIONAL_REGISTER_NAMES;
1035
1036 for (i = 0; i < (int) ARRAY_SIZE (table); i++)
1037 if (table[i].name[0]
1038 && ! strcmp (asmspec, table[i].name)
1039 && reg_names[table[i].number][0])
1040 return table[i].number;
1041 }
1042 #endif /* ADDITIONAL_REGISTER_NAMES */
1043
1044 if (!strcmp (asmspec, "memory"))
1045 return -4;
1046
1047 if (!strcmp (asmspec, "cc"))
1048 return -3;
1049
1050 return -2;
1051 }
1052
1053 return -1;
1054 }
1055
1056 int
1057 decode_reg_name (const char *name)
1058 {
1059 int count;
1060 return decode_reg_name_and_count (name, &count);
1061 }
1062
1063 \f
1064 /* Return true if DECL's initializer is suitable for a BSS section. */
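/* For example (hypothetical declarations): `int x;' and, with
   -fzero-initialized-in-bss, `int x = 0;' both qualify, while
   `const int x = 0;' does not unless it is common or NAMED is true,
   and `int x = 1;' never does.  */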
1065
1066 bool
1067 bss_initializer_p (const_tree decl, bool named)
1068 {
1069 /* Do not put non-common constants into the .bss section; they belong in
1070 a readonly section, except when NAMED is true. */
1071 return ((!TREE_READONLY (decl) || DECL_COMMON (decl) || named)
1072 && (DECL_INITIAL (decl) == NULL
1073 /* In LTO we have no errors in the program; error_mark_node is used
1074 to mark offlined constructors. */
1075 || (DECL_INITIAL (decl) == error_mark_node
1076 && !in_lto_p)
1077 || (flag_zero_initialized_in_bss
1078 && initializer_zerop (DECL_INITIAL (decl))
1079 /* A decl with the "persistent" attribute applied and
1080 explicitly initialized to 0 should not be treated as a BSS
1081 variable. */
1082 && !DECL_PERSISTENT_P (decl))));
1083 }
1084
1085 /* Compute the alignment of variable specified by DECL.
1086 DONT_OUTPUT_DATA is from assemble_variable. */
1087
1088 void
1089 align_variable (tree decl, bool dont_output_data)
1090 {
1091 unsigned int align = DECL_ALIGN (decl);
1092
1093 /* In the case of initializing an array whose length isn't specified,
1094 where we have not yet been able to do the layout,
1095 figure out the proper alignment now. */
1096 if (dont_output_data && DECL_SIZE (decl) == 0
1097 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1098 align = MAX (align, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
1099
1100 /* Some object file formats have a maximum alignment which they support.
1101 In particular, a.out format supports a maximum alignment of 4. */
1102 if (align > MAX_OFILE_ALIGNMENT)
1103 {
1104 error ("alignment of %q+D is greater than maximum object "
1105 "file alignment %d", decl,
1106 MAX_OFILE_ALIGNMENT/BITS_PER_UNIT);
1107 align = MAX_OFILE_ALIGNMENT;
1108 }
1109
1110 if (! DECL_USER_ALIGN (decl))
1111 {
1112 #ifdef DATA_ABI_ALIGNMENT
1113 unsigned int data_abi_align
1114 = DATA_ABI_ALIGNMENT (TREE_TYPE (decl), align);
1115 /* For backwards compatibility, don't assume the ABI alignment for
1116 TLS variables. */
1117 if (! DECL_THREAD_LOCAL_P (decl) || data_abi_align <= BITS_PER_WORD)
1118 align = data_abi_align;
1119 #endif
1120
1121 /* On some machines, it is good to increase alignment sometimes.
1122 But as DECL_ALIGN is used both for actually emitting the variable
1123 and for code accessing the variable as guaranteed alignment, we
1124 can only increase the alignment as a performance optimization
1125 if the references to it must bind to the current definition. */
1126 if (decl_binds_to_current_def_p (decl)
1127 && !DECL_VIRTUAL_P (decl))
1128 {
1129 #ifdef DATA_ALIGNMENT
1130 unsigned int data_align = DATA_ALIGNMENT (TREE_TYPE (decl), align);
1131 /* Don't increase alignment too much for TLS variables - TLS space
1132 is too precious. */
1133 if (! DECL_THREAD_LOCAL_P (decl) || data_align <= BITS_PER_WORD)
1134 align = data_align;
1135 #endif
1136 if (DECL_INITIAL (decl) != 0
1137 /* In LTO we have no errors in the program; error_mark_node is used
1138 to mark offlined constructors. */
1139 && (in_lto_p || DECL_INITIAL (decl) != error_mark_node))
1140 {
1141 unsigned int const_align
1142 = targetm.constant_alignment (DECL_INITIAL (decl), align);
1143 /* Don't increase alignment too much for TLS variables - TLS
1144 space is too precious. */
1145 if (! DECL_THREAD_LOCAL_P (decl) || const_align <= BITS_PER_WORD)
1146 align = const_align;
1147 }
1148 }
1149 }
1150
1151 /* Reset the alignment in case we have made it tighter, so we can benefit
1152 from it in get_pointer_alignment. */
1153 SET_DECL_ALIGN (decl, align);
1154 }
1155
1156 /* Return DECL_ALIGN (decl), possibly increased for optimization purposes
1157 beyond what align_variable returned. */
1158
1159 static unsigned int
1160 get_variable_align (tree decl)
1161 {
1162 unsigned int align = DECL_ALIGN (decl);
1163
1164 /* For user aligned vars or static vars align_variable already did
1165 everything. */
1166 if (DECL_USER_ALIGN (decl) || !TREE_PUBLIC (decl))
1167 return align;
1168
1169 #ifdef DATA_ABI_ALIGNMENT
1170 if (DECL_THREAD_LOCAL_P (decl))
1171 align = DATA_ABI_ALIGNMENT (TREE_TYPE (decl), align);
1172 #endif
1173
1174 /* For decls that bind to the current definition, align_variable
1175 also did everything, except that it did not assume the ABI-required
1176 alignment of TLS variables. For other vars, increase the alignment here
1177 as an optimization. */
1178 if (!decl_binds_to_current_def_p (decl))
1179 {
1180 /* On some machines, it is good to increase alignment sometimes. */
1181 #ifdef DATA_ALIGNMENT
1182 unsigned int data_align = DATA_ALIGNMENT (TREE_TYPE (decl), align);
1183 /* Don't increase alignment too much for TLS variables - TLS space
1184 is too precious. */
1185 if (! DECL_THREAD_LOCAL_P (decl) || data_align <= BITS_PER_WORD)
1186 align = data_align;
1187 #endif
1188 if (DECL_INITIAL (decl) != 0
1189 /* In LTO we have no errors in the program; error_mark_node is used
1190 to mark offlined constructors. */
1191 && (in_lto_p || DECL_INITIAL (decl) != error_mark_node))
1192 {
1193 unsigned int const_align
1194 = targetm.constant_alignment (DECL_INITIAL (decl), align);
1195 /* Don't increase alignment too much for TLS variables - TLS space
1196 is too precious. */
1197 if (! DECL_THREAD_LOCAL_P (decl) || const_align <= BITS_PER_WORD)
1198 align = const_align;
1199 }
1200 }
1201
1202 return align;
1203 }
1204
1205 /* Compute reloc for get_variable_section. The return value
1206 is a mask for which bit 1 indicates a global relocation, and bit 0
1207 indicates a local relocation. */
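/* Concretely, the possible return values are 0 (no relocations), 1 (local
   relocations only), 2 (global relocations only) and 3 (both).  */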
1208
1209 int
1210 compute_reloc_for_var (tree decl)
1211 {
1212 int reloc;
1213
1214 if (DECL_INITIAL (decl) == error_mark_node)
1215 reloc = contains_pointers_p (TREE_TYPE (decl)) ? 3 : 0;
1216 else if (DECL_INITIAL (decl))
1217 reloc = compute_reloc_for_constant (DECL_INITIAL (decl));
1218 else
1219 reloc = 0;
1220
1221 return reloc;
1222 }
1223
1224 /* Return the section into which the given VAR_DECL or CONST_DECL
1225 should be placed. PREFER_NOSWITCH_P is true if a noswitch
1226 section should be used wherever possible. */
1227
1228 section *
1229 get_variable_section (tree decl, bool prefer_noswitch_p)
1230 {
1231 addr_space_t as = ADDR_SPACE_GENERIC;
1232 int reloc;
1233 varpool_node *vnode = varpool_node::get (decl);
1234 if (vnode)
1235 {
1236 vnode = vnode->ultimate_alias_target ();
1237 decl = vnode->decl;
1238 }
1239
1240 if (TREE_TYPE (decl) != error_mark_node)
1241 as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
1242
1243 /* We need the constructor to figure out reloc flag. */
1244 if (vnode)
1245 vnode->get_constructor ();
1246
1247 if (DECL_COMMON (decl)
1248 && !lookup_attribute ("retain", DECL_ATTRIBUTES (decl)))
1249 {
1250 /* If the decl has been given an explicit section name, or it resides
1251 in a non-generic address space, then it isn't common, and shouldn't
1252 be handled as such. */
1253 gcc_assert (DECL_SECTION_NAME (decl) == NULL
1254 && ADDR_SPACE_GENERIC_P (as));
1255 if (DECL_THREAD_LOCAL_P (decl))
1256 return tls_comm_section;
1257 else if (TREE_PUBLIC (decl) && bss_initializer_p (decl))
1258 return comm_section;
1259 }
1260
1261 reloc = compute_reloc_for_var (decl);
1262
1263 resolve_unique_section (decl, reloc, flag_data_sections);
1264 if (IN_NAMED_SECTION (decl))
1265 {
1266 section *sect = get_named_section (decl, NULL, reloc);
1267
1268 if ((sect->common.flags & SECTION_BSS)
1269 && !bss_initializer_p (decl, true))
1270 {
1271 error_at (DECL_SOURCE_LOCATION (decl),
1272 "only zero initializers are allowed in section %qs",
1273 sect->named.name);
1274 DECL_INITIAL (decl) = error_mark_node;
1275 }
1276 return sect;
1277 }
1278
1279 if (ADDR_SPACE_GENERIC_P (as)
1280 && !DECL_THREAD_LOCAL_P (decl)
1281 && !DECL_NOINIT_P (decl)
1282 && !(prefer_noswitch_p && targetm.have_switchable_bss_sections)
1283 && bss_initializer_p (decl))
1284 {
1285 if (!TREE_PUBLIC (decl)
1286 && !((flag_sanitize & SANITIZE_ADDRESS)
1287 && asan_protect_global (decl)))
1288 return lcomm_section;
1289 if (bss_noswitch_section)
1290 return bss_noswitch_section;
1291 }
1292
1293 return targetm.asm_out.select_section (decl, reloc,
1294 get_variable_align (decl));
1295 }
1296
1297 /* Return the block into which object_block DECL should be placed. */
1298
1299 static struct object_block *
1300 get_block_for_decl (tree decl)
1301 {
1302 section *sect;
1303
1304 if (VAR_P (decl))
1305 {
1306 /* The object must be defined in this translation unit. */
1307 if (DECL_EXTERNAL (decl))
1308 return NULL;
1309
1310 /* There's no point using object blocks for something that is
1311 isolated by definition. */
1312 if (DECL_COMDAT_GROUP (decl))
1313 return NULL;
1314 }
1315
1316 /* We can only calculate block offsets if the decl has a known
1317 constant size. */
1318 if (DECL_SIZE_UNIT (decl) == NULL)
1319 return NULL;
1320 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (decl)))
1321 return NULL;
1322
1323 /* Find out which section should contain DECL. We cannot put it into
1324 an object block if it requires a standalone definition. */
1325 if (VAR_P (decl))
1326 align_variable (decl, 0);
1327 sect = get_variable_section (decl, true);
1328 if (SECTION_STYLE (sect) == SECTION_NOSWITCH)
1329 return NULL;
1330
1331 if (bool (lookup_attribute ("retain", DECL_ATTRIBUTES (decl)))
1332 != bool (sect->common.flags & SECTION_RETAIN))
1333 return NULL;
1334
1335 return get_block_for_section (sect);
1336 }
1337
1338 /* Make sure block symbol SYMBOL is in block BLOCK. */
1339
1340 static void
1341 change_symbol_block (rtx symbol, struct object_block *block)
1342 {
1343 if (block != SYMBOL_REF_BLOCK (symbol))
1344 {
1345 gcc_assert (SYMBOL_REF_BLOCK_OFFSET (symbol) < 0);
1346 SYMBOL_REF_BLOCK (symbol) = block;
1347 }
1348 }
1349
1350 /* Return true if it is possible to put DECL in an object_block. */
1351
1352 static bool
1353 use_blocks_for_decl_p (tree decl)
1354 {
1355 struct symtab_node *snode;
1356
1357 /* Don't create object blocks if each DECL is placed into a separate
1358 section because that will uselessly create a section anchor for
1359 each DECL. */
1360 if (flag_data_sections)
1361 return false;
1362
1363 /* Only data DECLs can be placed into object blocks. */
1364 if (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
1365 return false;
1366
1367 /* DECL_INITIAL (decl) set to decl is a hack used for some decls that
1368 are never used from code directly, and we never want object block handling
1369 for those. */
1370 if (DECL_INITIAL (decl) == decl)
1371 return false;
1372
1373 /* If this decl is an alias, then we don't want to emit a
1374 definition. */
1375 if (VAR_P (decl)
1376 && (snode = symtab_node::get (decl)) != NULL
1377 && snode->alias)
1378 return false;
1379
1380 return targetm.use_blocks_for_decl_p (decl);
1381 }
1382
1383 /* Follow the IDENTIFIER_TRANSPARENT_ALIAS chain starting at *ALIAS
1384 until we find an identifier that is not itself a transparent alias.
1385 Modify the alias passed to it by reference (and all aliases on the
1386 way to the ultimate target), such that they do not have to be
1387 followed again, and return the ultimate target of the alias
1388 chain. */
1389
1390 static inline tree
1391 ultimate_transparent_alias_target (tree *alias)
1392 {
1393 tree target = *alias;
1394
1395 if (IDENTIFIER_TRANSPARENT_ALIAS (target))
1396 {
1397 gcc_assert (TREE_CHAIN (target));
1398 target = ultimate_transparent_alias_target (&TREE_CHAIN (target));
1399 gcc_assert (! IDENTIFIER_TRANSPARENT_ALIAS (target)
1400 && ! TREE_CHAIN (target));
1401 *alias = target;
1402 }
1403
1404 return target;
1405 }
1406
1407 /* Return true if REGNUM is mentioned in ELIMINABLE_REGS as a from
1408 register number. */
1409
1410 static bool
1411 eliminable_regno_p (int regnum)
1412 {
1413 static const struct
1414 {
1415 const int from;
1416 const int to;
1417 } eliminables[] = ELIMINABLE_REGS;
1418 for (size_t i = 0; i < ARRAY_SIZE (eliminables); i++)
1419 if (regnum == eliminables[i].from)
1420 return true;
1421 return false;
1422 }
1423
1424 /* Create the DECL_RTL for a VAR_DECL or FUNCTION_DECL. DECL should
1425 have static storage duration. In other words, it should not be an
1426 automatic variable, including PARM_DECLs.
1427
1428 There is, however, one exception: this function handles variables
1429 explicitly placed in a particular register by the user.
1430
1431 This is never called for PARM_DECL nodes. */
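/* The register exception mentioned above covers declarations such as
   `register int counter asm ("r13");' at file scope (an illustrative,
   target-dependent register name): they take the DECL_REGISTER path below
   and end up with a REG rather than a MEM as their DECL_RTL.  */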
1432
1433 void
1434 make_decl_rtl (tree decl)
1435 {
1436 const char *name = 0;
1437 int reg_number;
1438 tree id;
1439 rtx x;
1440
1441 /* Check that we are not being given an automatic variable. */
1442 gcc_assert (TREE_CODE (decl) != PARM_DECL
1443 && TREE_CODE (decl) != RESULT_DECL);
1444
1445 /* A weak alias has TREE_PUBLIC set but not the other bits. */
1446 gcc_assert (!VAR_P (decl)
1447 || TREE_STATIC (decl)
1448 || TREE_PUBLIC (decl)
1449 || DECL_EXTERNAL (decl)
1450 || DECL_REGISTER (decl));
1451
1452 /* And that we were not given a type or a label. */
1453 gcc_assert (TREE_CODE (decl) != TYPE_DECL
1454 && TREE_CODE (decl) != LABEL_DECL);
1455
1456 /* For a duplicate declaration, we can be called twice on the
1457 same DECL node. Don't discard the RTL already made. */
1458 if (DECL_RTL_SET_P (decl))
1459 {
1460 /* If the old RTL had the wrong mode, fix the mode. */
1461 x = DECL_RTL (decl);
1462 if (GET_MODE (x) != DECL_MODE (decl))
1463 SET_DECL_RTL (decl, adjust_address_nv (x, DECL_MODE (decl), 0));
1464
1465 if (TREE_CODE (decl) != FUNCTION_DECL && DECL_REGISTER (decl))
1466 return;
1467
1468 /* ??? Another way to do this would be to maintain a hashed
1469 table of such critters. Instead of adding stuff to a DECL
1470 to give certain attributes to it, we could use an external
1471 hash map from DECL to set of attributes. */
1472
1473 /* Let the target reassign the RTL if it wants.
1474 This is necessary, for example, when one machine specific
1475 decl attribute overrides another. */
1476 targetm.encode_section_info (decl, DECL_RTL (decl), false);
1477
1478 /* If the symbol has a SYMBOL_REF_BLOCK field, update it based
1479 on the new decl information. */
1480 if (MEM_P (x)
1481 && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
1482 && SYMBOL_REF_HAS_BLOCK_INFO_P (XEXP (x, 0)))
1483 change_symbol_block (XEXP (x, 0), get_block_for_decl (decl));
1484
1485 return;
1486 }
1487
1488 /* If this variable belongs to the global constant pool, retrieve the
1489 pre-computed RTL or recompute it in LTO mode. */
1490 if (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
1491 {
1492 SET_DECL_RTL (decl, output_constant_def (DECL_INITIAL (decl), 1));
1493 return;
1494 }
1495
1496 id = DECL_ASSEMBLER_NAME (decl);
1497 name = IDENTIFIER_POINTER (id);
1498
1499 if (name[0] != '*' && TREE_CODE (decl) != FUNCTION_DECL
1500 && DECL_REGISTER (decl))
1501 {
1502 error ("register name not specified for %q+D", decl);
1503 }
1504 else if (TREE_CODE (decl) != FUNCTION_DECL && DECL_REGISTER (decl))
1505 {
1506 const char *asmspec = name+1;
1507 machine_mode mode = DECL_MODE (decl);
1508 reg_number = decode_reg_name (asmspec);
1509 /* First detect errors in declaring global registers. */
1510 if (reg_number == -1)
1511 error ("register name not specified for %q+D", decl);
1512 else if (reg_number < 0)
1513 error ("invalid register name for %q+D", decl);
1514 else if (mode == BLKmode)
1515 error ("data type of %q+D isn%'t suitable for a register",
1516 decl);
1517 else if (!in_hard_reg_set_p (accessible_reg_set, mode, reg_number))
1518 error ("the register specified for %q+D cannot be accessed"
1519 " by the current target", decl);
1520 else if (!in_hard_reg_set_p (operand_reg_set, mode, reg_number))
1521 error ("the register specified for %q+D is not general enough"
1522 " to be used as a register variable", decl);
1523 else if (!targetm.hard_regno_mode_ok (reg_number, mode))
1524 error ("register specified for %q+D isn%'t suitable for data type",
1525 decl);
1526 else if (reg_number != HARD_FRAME_POINTER_REGNUM
1527 && (reg_number == FRAME_POINTER_REGNUM
1528 #ifdef RETURN_ADDRESS_POINTER_REGNUM
1529 || reg_number == RETURN_ADDRESS_POINTER_REGNUM
1530 #endif
1531 || reg_number == ARG_POINTER_REGNUM)
1532 && eliminable_regno_p (reg_number))
1533 error ("register specified for %q+D is an internal GCC "
1534 "implementation detail", decl);
1535 /* Now handle properly declared static register variables. */
1536 else
1537 {
1538 int nregs;
1539
1540 if (DECL_INITIAL (decl) != 0 && TREE_STATIC (decl))
1541 {
1542 DECL_INITIAL (decl) = 0;
1543 error ("global register variable has initial value");
1544 }
1545 if (TREE_THIS_VOLATILE (decl))
1546 warning (OPT_Wvolatile_register_var,
1547 "optimization may eliminate reads and/or "
1548 "writes to register variables");
1549
1550 /* If the user specified one of the eliminable registers here,
1551 e.g., FRAME_POINTER_REGNUM, we don't want to get this variable
1552 confused with that register and be eliminated. This usage is
1553 somewhat suspect... */
1554
1555 SET_DECL_RTL (decl, gen_raw_REG (mode, reg_number));
1556 ORIGINAL_REGNO (DECL_RTL (decl)) = reg_number;
1557 REG_USERVAR_P (DECL_RTL (decl)) = 1;
1558
1559 if (TREE_STATIC (decl))
1560 {
1561 /* Make this register global, so not usable for anything
1562 else. */
1563 #ifdef ASM_DECLARE_REGISTER_GLOBAL
1564 name = IDENTIFIER_POINTER (DECL_NAME (decl));
1565 ASM_DECLARE_REGISTER_GLOBAL (asm_out_file, decl, reg_number, name);
1566 #endif
1567 nregs = hard_regno_nregs (reg_number, mode);
1568 while (nregs > 0)
1569 globalize_reg (decl, reg_number + --nregs);
1570 }
1571
1572 /* As a register variable, it has no section. */
1573 return;
1574 }
1575 /* Avoid internal errors from invalid register
1576 specifications. */
1577 SET_DECL_ASSEMBLER_NAME (decl, NULL_TREE);
1578 DECL_HARD_REGISTER (decl) = 0;
1579 /* Also avoid SSA inconsistencies by pretending this is an external
1580 decl now. */
1581 DECL_EXTERNAL (decl) = 1;
1582 return;
1583 }
1584 /* Now handle ordinary static variables and functions (in memory).
1585 Also handle vars declared register invalidly. */
1586 else if (name[0] == '*')
1587 {
1588 #ifdef REGISTER_PREFIX
1589 if (strlen (REGISTER_PREFIX) != 0)
1590 {
1591 reg_number = decode_reg_name (name);
1592 if (reg_number >= 0 || reg_number == -3)
1593 error ("register name given for non-register variable %q+D", decl);
1594 }
1595 #endif
1596 }
1597
1598 /* Specifying a section attribute on a variable forces it into a
1599 non-.bss section, and thus it cannot be common. */
1600 /* FIXME: In general this code should not be necessary because
1601 the visibility pass is doing the same work. But notice_global_symbol
1602 is called early and it needs to make DECL_RTL to get the name.
1603 We take care of recomputing the DECL_RTL after visibility is changed. */
1604 if (VAR_P (decl)
1605 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
1606 && DECL_SECTION_NAME (decl) != NULL
1607 && DECL_INITIAL (decl) == NULL_TREE
1608 && DECL_COMMON (decl))
1609 DECL_COMMON (decl) = 0;
1610
1611 /* Variables can't be both common and weak. */
1612 if (VAR_P (decl) && DECL_WEAK (decl))
1613 DECL_COMMON (decl) = 0;
1614
1615 if (use_object_blocks_p () && use_blocks_for_decl_p (decl))
1616 x = create_block_symbol (name, get_block_for_decl (decl), -1);
1617 else
1618 {
1619 machine_mode address_mode = Pmode;
1620 if (TREE_TYPE (decl) != error_mark_node)
1621 {
1622 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
1623 address_mode = targetm.addr_space.address_mode (as);
1624 }
1625 x = gen_rtx_SYMBOL_REF (address_mode, name);
1626 }
1627 SYMBOL_REF_WEAK (x) = DECL_WEAK (decl);
1628 SET_SYMBOL_REF_DECL (x, decl);
1629
1630 x = gen_rtx_MEM (DECL_MODE (decl), x);
1631 if (TREE_CODE (decl) != FUNCTION_DECL)
1632 set_mem_attributes (x, decl, 1);
1633 SET_DECL_RTL (decl, x);
1634
1635 /* Optionally set flags or add text to the name to record information
1636 such as that it is a function name.
1637 If the name is changed, the macro ASM_OUTPUT_LABELREF
1638 will have to know how to strip this information. */
1639 targetm.encode_section_info (decl, DECL_RTL (decl), true);
1640 }
1641
1642 /* Like make_decl_rtl, but inhibit creation of new alias sets when
1643 calling make_decl_rtl. Also, reset DECL_RTL before returning the
1644 rtl. */
1645
1646 rtx
1647 make_decl_rtl_for_debug (tree decl)
1648 {
1649 unsigned int save_aliasing_flag;
1650 rtx rtl;
1651
1652 if (DECL_RTL_SET_P (decl))
1653 return DECL_RTL (decl);
1654
1655 /* Kludge alert! Somewhere down the call chain, make_decl_rtl will
1656 call new_alias_set. If running with -fcompare-debug, sometimes
1657 we do not want to create alias sets that will throw the alias
1658 numbers off in the comparison dumps. So... clearing
1659 flag_strict_aliasing will keep new_alias_set() from creating a
1660 new set. */
1661 save_aliasing_flag = flag_strict_aliasing;
1662 flag_strict_aliasing = 0;
1663
1664 rtl = DECL_RTL (decl);
1665 /* Reset DECL_RTL back, as various parts of the compiler expect
1666 DECL_RTL to be set, meaning it is actually going to be output. */
1667 SET_DECL_RTL (decl, NULL);
1668
1669 flag_strict_aliasing = save_aliasing_flag;
1670 return rtl;
1671 }
1672 \f
1673 /* Output a string of literal assembler code
1674 for an `asm' keyword used between functions. */
1675
1676 void
1677 assemble_asm (tree string)
1678 {
1679 const char *p;
1680 app_enable ();
1681
1682 if (TREE_CODE (string) == ADDR_EXPR)
1683 string = TREE_OPERAND (string, 0);
1684
1685 p = TREE_STRING_POINTER (string);
1686 fprintf (asm_out_file, "%s%s\n", p[0] == '\t' ? "" : "\t", p);
1687 }
1688
1689 /* Write the address of the entity given by SYMBOL to SEC. */
1690 void
1691 assemble_addr_to_section (rtx symbol, section *sec)
1692 {
1693 switch_to_section (sec);
1694 assemble_align (POINTER_SIZE);
1695 assemble_integer (symbol, POINTER_SIZE_UNITS, POINTER_SIZE, 1);
1696 }
1697
1698 /* Return the numbered .ctors.N (if CONSTRUCTOR_P) or .dtors.N (if
1699 not) section for PRIORITY. */
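/* For example, assuming MAX_INIT_PRIORITY is 65535: a constructor with
   priority 65500 is placed in ".ctors.00035", so the linker's lexicographic
   sort of the numbered sections reproduces the intended execution order.  */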
1700 section *
1701 get_cdtor_priority_section (int priority, bool constructor_p)
1702 {
1703 /* Buffer conservatively large enough for the full range of a 32-bit
1704 int plus the text below. */
1705 char buf[18];
1706
1707 /* ??? This only works reliably with the GNU linker. */
1708 sprintf (buf, "%s.%.5u",
1709 constructor_p ? ".ctors" : ".dtors",
1710 /* Invert the numbering so the linker puts us in the proper
1711 order; constructors are run from right to left, and the
1712 linker sorts in increasing order. */
1713 MAX_INIT_PRIORITY - priority);
1714 return get_section (buf, SECTION_WRITE, NULL);
1715 }
1716
1717 void
1718 default_named_section_asm_out_destructor (rtx symbol, int priority)
1719 {
1720 section *sec;
1721
1722 if (priority != DEFAULT_INIT_PRIORITY)
1723 sec = get_cdtor_priority_section (priority,
1724 /*constructor_p=*/false);
1725 else
1726 sec = get_section (".dtors", SECTION_WRITE, NULL);
1727
1728 assemble_addr_to_section (symbol, sec);
1729 }
1730
1731 #ifdef DTORS_SECTION_ASM_OP
1732 void
1733 default_dtor_section_asm_out_destructor (rtx symbol,
1734 int priority ATTRIBUTE_UNUSED)
1735 {
1736 assemble_addr_to_section (symbol, dtors_section);
1737 }
1738 #endif
1739
1740 void
1741 default_named_section_asm_out_constructor (rtx symbol, int priority)
1742 {
1743 section *sec;
1744
1745 if (priority != DEFAULT_INIT_PRIORITY)
1746 sec = get_cdtor_priority_section (priority,
1747 /*constructor_p=*/true);
1748 else
1749 sec = get_section (".ctors", SECTION_WRITE, NULL);
1750
1751 assemble_addr_to_section (symbol, sec);
1752 }
1753
1754 #ifdef CTORS_SECTION_ASM_OP
1755 void
1756 default_ctor_section_asm_out_constructor (rtx symbol,
1757 int priority ATTRIBUTE_UNUSED)
1758 {
1759 assemble_addr_to_section (symbol, ctors_section);
1760 }
1761 #endif
1762 \f
1763 /* CONSTANT_POOL_BEFORE_FUNCTION may be defined as an expression with
1764 a nonzero value if the constant pool should be output before the
1765 start of the function, or a zero value if the pool should be output
1766 after the end of the function. The default is to put it before the
1767 start. */
1768
1769 #ifndef CONSTANT_POOL_BEFORE_FUNCTION
1770 #define CONSTANT_POOL_BEFORE_FUNCTION 1
1771 #endif
1772
1773 /* DECL is an object (either VAR_DECL or FUNCTION_DECL) which is going
1774 to be output to assembler.
1775 Set first_global_object_name and weak_global_object_name as appropriate. */
1776
1777 void
1778 notice_global_symbol (tree decl)
1779 {
1780 const char **t = &first_global_object_name;
1781
1782 if (first_global_object_name
1783 || !TREE_PUBLIC (decl)
1784 || DECL_EXTERNAL (decl)
1785 || !DECL_NAME (decl)
1786 || (VAR_P (decl) && DECL_HARD_REGISTER (decl))
1787 || (TREE_CODE (decl) != FUNCTION_DECL
1788 && (!VAR_P (decl)
1789 || (DECL_COMMON (decl)
1790 && (DECL_INITIAL (decl) == 0
1791 || DECL_INITIAL (decl) == error_mark_node)))))
1792 return;
1793
1794 /* We win when a global object is found, but it is useful to know about a
1795 weak symbol as well so we can produce nicer unique names. */
1796 if (DECL_WEAK (decl) || DECL_ONE_ONLY (decl) || flag_shlib)
1797 t = &weak_global_object_name;
1798
1799 if (!*t)
1800 {
1801 tree id = DECL_ASSEMBLER_NAME (decl);
1802 ultimate_transparent_alias_target (&id);
1803 *t = ggc_strdup (targetm.strip_name_encoding (IDENTIFIER_POINTER (id)));
1804 }
1805 }
1806
1807 /* If not using flag_reorder_blocks_and_partition, decide early whether the
1808 current function goes into the cold section, so that targets can use
1809 current_function_section during RTL expansion. DECL describes the
1810 function. */
1811
1812 void
1813 decide_function_section (tree decl)
1814 {
1815 first_function_block_is_cold = false;
1816
1817 if (DECL_SECTION_NAME (decl))
1818 {
1819 struct cgraph_node *node = cgraph_node::get (current_function_decl);
1820 /* Calls to function_section rely on first_function_block_is_cold
1821 being accurate. */
1822 first_function_block_is_cold = (node
1823 && node->frequency
1824 == NODE_FREQUENCY_UNLIKELY_EXECUTED);
1825 }
1826
1827 in_cold_section_p = first_function_block_is_cold;
1828 }
1829
1830 /* Get the function's name, as described by its RTL. This may be
1831 different from the DECL_NAME name used in the source file. */
1832 const char *
1833 get_fnname_from_decl (tree decl)
1834 {
1835 rtx x = DECL_RTL (decl);
1836 gcc_assert (MEM_P (x));
1837 x = XEXP (x, 0);
1838 gcc_assert (GET_CODE (x) == SYMBOL_REF);
1839 return XSTR (x, 0);
1840 }
1841
1842 /* Output assembler code for the constant pool of a function, and code
1843 associated with defining the name of the function. DECL describes the function.
1844 NAME is the function's name. For the constant pool, we use the current
1845 constant pool data. */
1846
1847 void
1848 assemble_start_function (tree decl, const char *fnname)
1849 {
1850 int align;
1851 char tmp_label[100];
1852 bool hot_label_written = false;
1853
1854 if (crtl->has_bb_partition)
1855 {
1856 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LHOTB", const_labelno);
1857 crtl->subsections.hot_section_label = ggc_strdup (tmp_label);
1858 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LCOLDB", const_labelno);
1859 crtl->subsections.cold_section_label = ggc_strdup (tmp_label);
1860 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LHOTE", const_labelno);
1861 crtl->subsections.hot_section_end_label = ggc_strdup (tmp_label);
1862 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LCOLDE", const_labelno);
1863 crtl->subsections.cold_section_end_label = ggc_strdup (tmp_label);
1864 const_labelno++;
1865 cold_function_name = NULL_TREE;
1866 }
1867 else
1868 {
1869 crtl->subsections.hot_section_label = NULL;
1870 crtl->subsections.cold_section_label = NULL;
1871 crtl->subsections.hot_section_end_label = NULL;
1872 crtl->subsections.cold_section_end_label = NULL;
1873 }
1874
1875 /* The following code does not need preprocessing in the assembler. */
1876
1877 app_disable ();
1878
1879 if (CONSTANT_POOL_BEFORE_FUNCTION)
1880 output_constant_pool (fnname, decl);
1881
1882 align = symtab_node::get (decl)->definition_alignment ();
1883
1884 /* Make sure the hot and cold text (code) sections are properly
1885 aligned. This is necessary here in the case where the function
1886 has both hot and cold sections, because we don't want to re-set
1887 the alignment when the section switch happens mid-function. */
1888
1889 if (crtl->has_bb_partition)
1890 {
1891 first_function_block_is_cold = false;
1892
1893 switch_to_section (unlikely_text_section ());
1894 assemble_align (align);
1895 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.cold_section_label);
1896
1897 /* When the function starts with a cold section, we need to explicitly
1898 align the hot section and write out the hot section label.
1899 But if the current function is a thunk, we do not have a CFG. */
1900 if (!cfun->is_thunk
1901 && BB_PARTITION (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) == BB_COLD_PARTITION)
1902 {
1903 switch_to_section (text_section);
1904 assemble_align (align);
1905 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_label);
1906 hot_label_written = true;
1907 first_function_block_is_cold = true;
1908 }
1909 in_cold_section_p = first_function_block_is_cold;
1910 }
1911
1912
1913 /* Switch to the correct text section for the start of the function. */
1914
1915 switch_to_section (function_section (decl), decl);
1916 if (crtl->has_bb_partition && !hot_label_written)
1917 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_label);
1918
1919 /* Tell assembler to move to target machine's alignment for functions. */
1920 align = floor_log2 (align / BITS_PER_UNIT);
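/* E.g. a definition alignment of 128 bits on an 8-bit-unit target gives
   floor_log2 (128 / 8) == 4, i.e. a request for 2**4 == 16-byte
   alignment.  */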
1921 if (align > 0)
1922 {
1923 ASM_OUTPUT_ALIGN (asm_out_file, align);
1924 }
1925
1926 /* Apply the -falign-functions alignment unless the user specified
1927 a function alignment explicitly. Note that we still need to align to
1928 DECL_ALIGN, as above, because ASM_OUTPUT_MAX_SKIP_ALIGN might not do any alignment at all. */
1929 if (! DECL_USER_ALIGN (decl)
1930 && align_functions.levels[0].log > align
1931 && optimize_function_for_speed_p (cfun))
1932 {
1933 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1934 int align_log = align_functions.levels[0].log;
1935 #endif
1936 int max_skip = align_functions.levels[0].maxskip;
1937 if (flag_limit_function_alignment && crtl->max_insn_address > 0
1938 && max_skip >= crtl->max_insn_address)
1939 max_skip = crtl->max_insn_address - 1;
1940
1941 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1942 ASM_OUTPUT_MAX_SKIP_ALIGN (asm_out_file, align_log, max_skip);
1943 if (max_skip == align_functions.levels[0].maxskip)
1944 ASM_OUTPUT_MAX_SKIP_ALIGN (asm_out_file,
1945 align_functions.levels[1].log,
1946 align_functions.levels[1].maxskip);
1947 #else
1948 ASM_OUTPUT_ALIGN (asm_out_file, align_functions.levels[0].log);
1949 #endif
1950 }
1951
1952 #ifdef ASM_OUTPUT_FUNCTION_PREFIX
1953 ASM_OUTPUT_FUNCTION_PREFIX (asm_out_file, fnname);
1954 #endif
1955
1956 if (!DECL_IGNORED_P (decl))
1957 (*debug_hooks->begin_function) (decl);
1958
1959 /* Make function name accessible from other files, if appropriate. */
1960
1961 if (TREE_PUBLIC (decl))
1962 {
1963 notice_global_symbol (decl);
1964
1965 globalize_decl (decl);
1966
1967 maybe_assemble_visibility (decl);
1968 }
1969
1970 if (DECL_PRESERVE_P (decl))
1971 targetm.asm_out.mark_decl_preserved (fnname);
1972
1973 unsigned short patch_area_size = crtl->patch_area_size;
1974 unsigned short patch_area_entry = crtl->patch_area_entry;
1975
1976 /* Emit the patching area before the entry label, if any. */
1977 if (patch_area_entry > 0)
1978 targetm.asm_out.print_patchable_function_entry (asm_out_file,
1979 patch_area_entry, true);
1980
1981 /* Do any machine/system dependent processing of the function name. */
1982 #ifdef ASM_DECLARE_FUNCTION_NAME
1983 ASM_DECLARE_FUNCTION_NAME (asm_out_file, fnname, current_function_decl);
1984 #else
1985 /* Standard thing is just output label for the function. */
1986 ASM_OUTPUT_FUNCTION_LABEL (asm_out_file, fnname, current_function_decl);
1987 #endif /* ASM_DECLARE_FUNCTION_NAME */
1988
1989 /* And the area after the label. Record it if we haven't done so yet. */
1990 if (patch_area_size > patch_area_entry)
1991 targetm.asm_out.print_patchable_function_entry (asm_out_file,
1992 patch_area_size
1993 - patch_area_entry,
1994 patch_area_entry == 0);
1995
1996 if (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (decl)))
1997 saw_no_split_stack = true;
1998 }
1999
2000 /* Output assembler code associated with defining the size of the
2001 function. DECL describes the function. NAME is the function's name. */
2002
2003 void
2004 assemble_end_function (tree decl, const char *fnname ATTRIBUTE_UNUSED)
2005 {
2006 #ifdef ASM_DECLARE_FUNCTION_SIZE
2007 /* We could have switched section in the middle of the function. */
2008 if (crtl->has_bb_partition)
2009 switch_to_section (function_section (decl));
2010 ASM_DECLARE_FUNCTION_SIZE (asm_out_file, fnname, decl);
2011 #endif
2012 if (! CONSTANT_POOL_BEFORE_FUNCTION)
2013 {
2014 output_constant_pool (fnname, decl);
2015 switch_to_section (function_section (decl)); /* need to switch back */
2016 }
2017 /* Output labels for end of hot/cold text sections (to be used by
2018 debug info.) */
2019 if (crtl->has_bb_partition)
2020 {
2021 section *save_text_section;
2022
2023 save_text_section = in_section;
2024 switch_to_section (unlikely_text_section ());
2025 #ifdef ASM_DECLARE_COLD_FUNCTION_SIZE
2026 if (cold_function_name != NULL_TREE)
2027 ASM_DECLARE_COLD_FUNCTION_SIZE (asm_out_file,
2028 IDENTIFIER_POINTER (cold_function_name),
2029 decl);
2030 #endif
2031 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.cold_section_end_label);
2032 if (first_function_block_is_cold)
2033 switch_to_section (text_section);
2034 else
2035 switch_to_section (function_section (decl));
2036 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_end_label);
2037 switch_to_section (save_text_section);
2038 }
2039 }
2040 \f
2041 /* Assemble code to leave SIZE bytes of zeros. */
2042
2043 void
2044 assemble_zeros (unsigned HOST_WIDE_INT size)
2045 {
2046 /* Do no output if -fsyntax-only. */
2047 if (flag_syntax_only)
2048 return;
2049
2050 #ifdef ASM_NO_SKIP_IN_TEXT
2051 /* The `space' pseudo in the text section outputs nop insns rather than 0s,
2052 so we must output 0s explicitly in the text section. */
2053 if (ASM_NO_SKIP_IN_TEXT && (in_section->common.flags & SECTION_CODE) != 0)
2054 {
2055 unsigned HOST_WIDE_INT i;
2056 for (i = 0; i < size; i++)
2057 assemble_integer (const0_rtx, 1, BITS_PER_UNIT, 1);
2058 }
2059 else
2060 #endif
2061 if (size > 0)
2062 ASM_OUTPUT_SKIP (asm_out_file, size);
2063 }
2064
2065 /* Assemble an alignment pseudo op for an ALIGN-bit boundary. */
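/* For example, assemble_align (64) on a target with BITS_PER_UNIT == 8
   calls ASM_OUTPUT_ALIGN (asm_out_file, 3), requesting 2**3 == 8-byte
   alignment; alignments of one unit or less are dropped.  */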
2066
2067 void
2068 assemble_align (unsigned int align)
2069 {
2070 if (align > BITS_PER_UNIT)
2071 {
2072 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
2073 }
2074 }
2075
2076 /* Assemble a string constant with the specified C string as contents. */
2077
2078 void
2079 assemble_string (const char *p, int size)
2080 {
2081 int pos = 0;
2082 int maximum = 2000;
2083
2084 /* If the string is very long, split it up. */
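/* E.g. a 5000-byte string is emitted as three ASM_OUTPUT_ASCII chunks of
   2000, 2000 and 1000 bytes.  */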
2085
2086 while (pos < size)
2087 {
2088 int thissize = size - pos;
2089 if (thissize > maximum)
2090 thissize = maximum;
2091
2092 ASM_OUTPUT_ASCII (asm_out_file, p, thissize);
2093
2094 pos += thissize;
2095 p += thissize;
2096 }
2097 }
2098
2099 \f
2100 /* A noswitch_section_callback for lcomm_section. */
2101
2102 static bool
2103 emit_local (tree decl ATTRIBUTE_UNUSED,
2104 const char *name ATTRIBUTE_UNUSED,
2105 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
2106 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
2107 {
2108 #if defined ASM_OUTPUT_ALIGNED_DECL_LOCAL
2109 unsigned int align = symtab_node::get (decl)->definition_alignment ();
2110 ASM_OUTPUT_ALIGNED_DECL_LOCAL (asm_out_file, decl, name,
2111 size, align);
2112 return true;
2113 #elif defined ASM_OUTPUT_ALIGNED_LOCAL
2114 unsigned int align = symtab_node::get (decl)->definition_alignment ();
2115 ASM_OUTPUT_ALIGNED_LOCAL (asm_out_file, name, size, align);
2116 return true;
2117 #else
2118 ASM_OUTPUT_LOCAL (asm_out_file, name, size, rounded);
2119 return false;
2120 #endif
2121 }
2122
2123 /* A noswitch_section_callback for bss_noswitch_section. */
2124
2125 #if defined ASM_OUTPUT_ALIGNED_BSS
2126 static bool
2127 emit_bss (tree decl ATTRIBUTE_UNUSED,
2128 const char *name ATTRIBUTE_UNUSED,
2129 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
2130 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
2131 {
2132 ASM_OUTPUT_ALIGNED_BSS (asm_out_file, decl, name, size,
2133 get_variable_align (decl));
2134 return true;
2135 }
2136 #endif
2137
2138 /* A noswitch_section_callback for comm_section. */
2139
2140 static bool
2141 emit_common (tree decl ATTRIBUTE_UNUSED,
2142 const char *name ATTRIBUTE_UNUSED,
2143 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
2144 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
2145 {
2146 #if defined ASM_OUTPUT_ALIGNED_DECL_COMMON
2147 ASM_OUTPUT_ALIGNED_DECL_COMMON (asm_out_file, decl, name,
2148 size, get_variable_align (decl));
2149 return true;
2150 #elif defined ASM_OUTPUT_ALIGNED_COMMON
2151 ASM_OUTPUT_ALIGNED_COMMON (asm_out_file, name, size,
2152 get_variable_align (decl));
2153 return true;
2154 #else
2155 ASM_OUTPUT_COMMON (asm_out_file, name, size, rounded);
2156 return false;
2157 #endif
2158 }
2159
2160 /* A noswitch_section_callback for tls_comm_section. */
2161
2162 static bool
2163 emit_tls_common (tree decl ATTRIBUTE_UNUSED,
2164 const char *name ATTRIBUTE_UNUSED,
2165 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
2166 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
2167 {
2168 #ifdef ASM_OUTPUT_TLS_COMMON
2169 ASM_OUTPUT_TLS_COMMON (asm_out_file, decl, name, size);
2170 return true;
2171 #else
2172 sorry ("thread-local COMMON data not implemented");
2173 return true;
2174 #endif
2175 }
2176
2177 /* Assemble DECL given that it belongs in SECTION_NOSWITCH section SECT.
2178 NAME is the name of DECL's SYMBOL_REF. */
2179
2180 static void
2181 assemble_noswitch_variable (tree decl, const char *name, section *sect,
2182 unsigned int align)
2183 {
2184 unsigned HOST_WIDE_INT size, rounded;
2185
2186 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2187 rounded = size;
2188
2189 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_protect_global (decl))
2190 size += asan_red_zone_size (size);
2191
2192 /* Don't allocate zero bytes of common,
2193 since that means "undefined external" in the linker. */
2194 if (size == 0)
2195 rounded = 1;
2196
2197 /* Round size up to multiple of BIGGEST_ALIGNMENT bits
2198 so that each uninitialized object starts on such a boundary. */
2199 rounded += (BIGGEST_ALIGNMENT / BITS_PER_UNIT) - 1;
2200 rounded = (rounded / (BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2201 * (BIGGEST_ALIGNMENT / BITS_PER_UNIT));
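/* E.g. with BIGGEST_ALIGNMENT == 128 bits (16 bytes), a 20-byte object
   rounds up to (20 + 15) / 16 * 16 == 32 bytes.  */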
2202
2203 if (!sect->noswitch.callback (decl, name, size, rounded)
2204 && (unsigned HOST_WIDE_INT) (align / BITS_PER_UNIT) > rounded)
2205 error ("requested alignment for %q+D is greater than "
2206 "implemented alignment of %wu", decl, rounded);
2207 }
2208
2209 /* A subroutine of assemble_variable. Output the label and contents of
2210 DECL, whose address is a SYMBOL_REF with name NAME. DONT_OUTPUT_DATA
2211 is as for assemble_variable. */
2212
2213 static void
2214 assemble_variable_contents (tree decl, const char *name,
2215 bool dont_output_data, bool merge_strings)
2216 {
2217 /* Do any machine/system dependent processing of the object. */
2218 #ifdef ASM_DECLARE_OBJECT_NAME
2219 last_assemble_variable_decl = decl;
2220 ASM_DECLARE_OBJECT_NAME (asm_out_file, name, decl);
2221 #else
2222 /* Standard thing is just output label for the object. */
2223 ASM_OUTPUT_LABEL (asm_out_file, name);
2224 #endif /* ASM_DECLARE_OBJECT_NAME */
2225
2226 if (!dont_output_data)
2227 {
2228 /* Caller is supposed to use varpool_get_constructor when it wants
2229 to output the body. */
2230 gcc_assert (!in_lto_p || DECL_INITIAL (decl) != error_mark_node);
2231 if (DECL_INITIAL (decl)
2232 && DECL_INITIAL (decl) != error_mark_node
2233 && !initializer_zerop (DECL_INITIAL (decl)))
2234 /* Output the actual data. */
2235 output_constant (DECL_INITIAL (decl),
2236 tree_to_uhwi (DECL_SIZE_UNIT (decl)),
2237 get_variable_align (decl),
2238 false, merge_strings);
2239 else
2240 /* Leave space for it. */
2241 assemble_zeros (tree_to_uhwi (DECL_SIZE_UNIT (decl)));
2242 targetm.asm_out.decl_end ();
2243 }
2244 }
2245
2246 /* Write out assembly for the variable DECL, which is not defined in
2247 the current translation unit. */
2248 void
2249 assemble_undefined_decl (tree decl)
2250 {
2251 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
2252 targetm.asm_out.assemble_undefined_decl (asm_out_file, name, decl);
2253 }
2254
2255 /* Assemble everything that is needed for a variable or function declaration.
2256 Not used for automatic variables, and not used for function definitions.
2257 Should not be called for variables of incomplete structure type.
2258
2259 TOP_LEVEL is nonzero if this variable has file scope.
2260 AT_END is nonzero if this is the special handling, at end of compilation,
2261 to define things that have had only tentative definitions.
2262 DONT_OUTPUT_DATA if nonzero means don't actually output the
2263 initial value (that will be done by the caller). */
2264
2265 void
2266 assemble_variable (tree decl, int top_level ATTRIBUTE_UNUSED,
2267 int at_end ATTRIBUTE_UNUSED, int dont_output_data)
2268 {
2269 const char *name;
2270 rtx decl_rtl, symbol;
2271 section *sect;
2272 unsigned int align;
2273 bool asan_protected = false;
2274
2275 /* This function is supposed to handle VARIABLES. Ensure we have one. */
2276 gcc_assert (VAR_P (decl));
2277
2278 /* Emulated TLS had better not get this far. */
2279 gcc_checking_assert (targetm.have_tls || !DECL_THREAD_LOCAL_P (decl));
2280
2281 last_assemble_variable_decl = 0;
2282
2283 /* Normally no need to say anything here for external references,
2284 since assemble_external is called by the language-specific code
2285 when a declaration is first seen. */
2286
2287 if (DECL_EXTERNAL (decl))
2288 return;
2289
2290 /* Do nothing for global register variables. */
2291 if (DECL_RTL_SET_P (decl) && REG_P (DECL_RTL (decl)))
2292 {
2293 TREE_ASM_WRITTEN (decl) = 1;
2294 return;
2295 }
2296
2297 /* If type was incomplete when the variable was declared,
2298 see if it is complete now. */
2299
2300 if (DECL_SIZE (decl) == 0)
2301 layout_decl (decl, 0);
2302
2303 /* Still incomplete => don't allocate it; treat the tentative defn
2304 (which is what it must have been) as an `extern' reference. */
2305
2306 if (!dont_output_data && DECL_SIZE (decl) == 0)
2307 {
2308 error ("storage size of %q+D isn%'t known", decl);
2309 TREE_ASM_WRITTEN (decl) = 1;
2310 return;
2311 }
2312
2313 /* The first declaration of a variable that comes through this function
2314 decides whether it is global (in C, has external linkage)
2315 or local (in C, has internal linkage). So do nothing more
2316 if this function has already run. */
2317
2318 if (TREE_ASM_WRITTEN (decl))
2319 return;
2320
2321 /* Make sure targetm.encode_section_info is invoked before we set
2322 ASM_WRITTEN. */
2323 decl_rtl = DECL_RTL (decl);
2324
2325 TREE_ASM_WRITTEN (decl) = 1;
2326
2327 /* Do no output if -fsyntax-only. */
2328 if (flag_syntax_only)
2329 return;
2330
2331 if (! dont_output_data
2332 && ! valid_constant_size_p (DECL_SIZE_UNIT (decl)))
2333 {
2334 error ("size of variable %q+D is too large", decl);
2335 return;
2336 }
2337
2338 gcc_assert (MEM_P (decl_rtl));
2339 gcc_assert (GET_CODE (XEXP (decl_rtl, 0)) == SYMBOL_REF);
2340 symbol = XEXP (decl_rtl, 0);
2341
2342 /* If this symbol belongs to the tree constant pool, output the constant
2343 if it hasn't already been written. */
2344 if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
2345 {
2346 tree decl = SYMBOL_REF_DECL (symbol);
2347 if (!TREE_ASM_WRITTEN (DECL_INITIAL (decl)))
2348 output_constant_def_contents (symbol);
2349 return;
2350 }
2351
2352 app_disable ();
2353
2354 name = XSTR (symbol, 0);
2355 if (TREE_PUBLIC (decl) && DECL_NAME (decl))
2356 notice_global_symbol (decl);
2357
2358 /* Compute the alignment of this data. */
2359
2360 align_variable (decl, dont_output_data);
2361
2362 if ((flag_sanitize & SANITIZE_ADDRESS)
2363 && asan_protect_global (decl))
2364 {
2365 asan_protected = true;
2366 SET_DECL_ALIGN (decl, MAX (DECL_ALIGN (decl),
2367 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT));
2368 }
2369
2370 set_mem_align (decl_rtl, DECL_ALIGN (decl));
2371
2372 align = get_variable_align (decl);
2373
2374 if (TREE_PUBLIC (decl))
2375 maybe_assemble_visibility (decl);
2376
2377 if (DECL_PRESERVE_P (decl))
2378 targetm.asm_out.mark_decl_preserved (name);
2379
2380 /* First make the assembler name(s) global if appropriate. */
2381 sect = get_variable_section (decl, false);
2382 if (TREE_PUBLIC (decl)
2383 && (sect->common.flags & SECTION_COMMON) == 0)
2384 globalize_decl (decl);
2385
2386 /* Output any data that we will need to use the address of. */
2387 if (DECL_INITIAL (decl) && DECL_INITIAL (decl) != error_mark_node)
2388 output_addressed_constants (DECL_INITIAL (decl), 0);
2389
2390 /* dbxout.c needs to know this. */
2391 if (sect && (sect->common.flags & SECTION_CODE) != 0)
2392 DECL_IN_TEXT_SECTION (decl) = 1;
2393
2394 /* If the decl is part of an object_block, make sure that the decl
2395 has been positioned within its block, but do not write out its
2396 definition yet. output_object_blocks will do that later. */
2397 if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol) && SYMBOL_REF_BLOCK (symbol))
2398 {
2399 gcc_assert (!dont_output_data);
2400 place_block_symbol (symbol);
2401 }
2402 else if (SECTION_STYLE (sect) == SECTION_NOSWITCH)
2403 assemble_noswitch_variable (decl, name, sect, align);
2404 else
2405 {
2406 /* Special-case handling of vtv comdat sections. */
2407 if (sect->named.name
2408 && (strcmp (sect->named.name, ".vtable_map_vars") == 0))
2409 handle_vtv_comdat_section (sect, decl);
2410 else
2411 switch_to_section (sect, decl);
2412 if (align > BITS_PER_UNIT)
2413 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
2414 assemble_variable_contents (decl, name, dont_output_data,
2415 (sect->common.flags & SECTION_MERGE)
2416 && (sect->common.flags & SECTION_STRINGS));
2417 if (asan_protected)
2418 {
2419 unsigned HOST_WIDE_INT size
2420 = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2421 assemble_zeros (asan_red_zone_size (size));
2422 }
2423 }
2424 }
2425
2426
2427 /* Given a function declaration (FN_DECL), this function assembles the
2428 address of the function into the .preinit_array section. */
2429
2430 void
2431 assemble_vtv_preinit_initializer (tree fn_decl)
2432 {
2433 section *sect;
2434 unsigned flags = SECTION_WRITE;
2435 rtx symbol = XEXP (DECL_RTL (fn_decl), 0);
2436
2437 flags |= SECTION_NOTYPE;
2438 sect = get_section (".preinit_array", flags, fn_decl);
2439 switch_to_section (sect);
2440 assemble_addr_to_section (symbol, sect);
2441 }
2442
2443 /* Return 1 if type TYPE contains any pointers. */
2444
2445 static int
2446 contains_pointers_p (tree type)
2447 {
2448 switch (TREE_CODE (type))
2449 {
2450 case POINTER_TYPE:
2451 case REFERENCE_TYPE:
2452 /* I'm not sure whether OFFSET_TYPE needs this treatment,
2453 so I'll play safe and return 1. */
2454 case OFFSET_TYPE:
2455 return 1;
2456
2457 case RECORD_TYPE:
2458 case UNION_TYPE:
2459 case QUAL_UNION_TYPE:
2460 {
2461 tree fields;
2462 /* For a type that has fields, see if the fields have pointers. */
2463 for (fields = TYPE_FIELDS (type); fields; fields = DECL_CHAIN (fields))
2464 if (TREE_CODE (fields) == FIELD_DECL
2465 && contains_pointers_p (TREE_TYPE (fields)))
2466 return 1;
2467 return 0;
2468 }
2469
2470 case ARRAY_TYPE:
2471 /* An array type contains pointers if its element type does. */
2472 return contains_pointers_p (TREE_TYPE (type));
2473
2474 default:
2475 return 0;
2476 }
2477 }
2478
2479 /* We delay assemble_external processing until
2480 the compilation unit is finalized. This is the best we can do for
2481 right now (i.e. stage 3 of GCC 4.0) - the right thing is to delay
2482 it all the way to final. See PR 17982 for further discussion. */
2483 static GTY(()) tree pending_assemble_externals;
2484
2485 #ifdef ASM_OUTPUT_EXTERNAL
2486 /* Some targets delay some output to final using TARGET_ASM_FILE_END.
2487 As a result, assemble_external can be called after the list of externals
2488 is processed and the pointer set destroyed. */
2489 static bool pending_assemble_externals_processed;
2490
2491 /* Avoid O(external_decls**2) lookups in the pending_assemble_externals
2492 TREE_LIST in assemble_external. */
2493 static hash_set<tree> *pending_assemble_externals_set;
2494
2495 /* True if DECL is a function decl for which no out-of-line copy exists.
2496 It is assumed that DECL's assembler name has been set. */
2497
2498 static bool
2499 incorporeal_function_p (tree decl)
2500 {
2501 if (TREE_CODE (decl) == FUNCTION_DECL && fndecl_built_in_p (decl))
2502 {
2503 const char *name;
2504
2505 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2506 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
2507 return true;
2508
2509 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2510 /* Atomic or sync builtins which have survived this far will be
2511 resolved externally and therefore are not incorporeal. */
2512 if (startswith (name, "__builtin_"))
2513 return true;
2514 }
2515 return false;
2516 }
2517
2518 /* Actually do the tests to determine if this is necessary, and invoke
2519 ASM_OUTPUT_EXTERNAL. */
2520 static void
2521 assemble_external_real (tree decl)
2522 {
2523 rtx rtl = DECL_RTL (decl);
2524
2525 if (MEM_P (rtl) && GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF
2526 && !SYMBOL_REF_USED (XEXP (rtl, 0))
2527 && !incorporeal_function_p (decl))
2528 {
2529 /* Some systems do require some output. */
2530 SYMBOL_REF_USED (XEXP (rtl, 0)) = 1;
2531 ASM_OUTPUT_EXTERNAL (asm_out_file, decl, XSTR (XEXP (rtl, 0), 0));
2532 }
2533 }
2534 #endif
2535
2536 void
2537 process_pending_assemble_externals (void)
2538 {
2539 #ifdef ASM_OUTPUT_EXTERNAL
2540 tree list;
2541 for (list = pending_assemble_externals; list; list = TREE_CHAIN (list))
2542 assemble_external_real (TREE_VALUE (list));
2543
2544 pending_assemble_externals = 0;
2545 pending_assemble_externals_processed = true;
2546 delete pending_assemble_externals_set;
2547 #endif
2548 }
2549
2550 /* This TREE_LIST contains any weak symbol declarations waiting
2551 to be emitted. */
2552 static GTY(()) tree weak_decls;
2553
2554 /* Output something to declare an external symbol to the assembler,
2555 and qualifiers such as weakness. (Most assemblers don't need an
2556 extern declaration, so we normally output nothing.) Do nothing if
2557 DECL is not external. */
2558
2559 void
2560 assemble_external (tree decl ATTRIBUTE_UNUSED)
2561 {
2562 /* Make sure that the ASM_OUT_FILE is open.
2563 If it's not, we should not be calling this function. */
2564 gcc_assert (asm_out_file);
2565
2566 /* In a perfect world, the following condition would be true.
2567 Sadly, the Go front end emits assembly *from the front end*,
2568 bypassing the call graph. See PR52739. Fix before GCC 4.8. */
2569 #if 0
2570 /* This function should only be called if we are expanding, or have
2571 expanded, to RTL.
2572 Ideally, only final.c would be calling this function, but it is
2573 not clear whether that would break things somehow. See PR 17982
2574 for further discussion. */
2575 gcc_assert (state == EXPANSION
2576 || state == FINISHED);
2577 #endif
2578
2579 if (!DECL_P (decl) || !DECL_EXTERNAL (decl) || !TREE_PUBLIC (decl))
2580 return;
2581
2582 /* We want to output the annotation for weak and external symbols at the
2583 very end, to check whether they are referenced or not. */
2584
2585 if (TARGET_SUPPORTS_WEAK
2586 && DECL_WEAK (decl)
2587 /* TREE_STATIC is a weird and abused creature which is not
2588 generally the right test for whether an entity has been
2589 locally emitted, inlined or otherwise not-really-extern, but
2590 for declarations that can be weak, it happens to
2591 match. */
2592 && !TREE_STATIC (decl)
2593 && lookup_attribute ("weak", DECL_ATTRIBUTES (decl))
2594 && value_member (decl, weak_decls) == NULL_TREE)
2595 weak_decls = tree_cons (NULL, decl, weak_decls);
2596
2597 #ifdef ASM_OUTPUT_EXTERNAL
2598 if (pending_assemble_externals_processed)
2599 {
2600 assemble_external_real (decl);
2601 return;
2602 }
2603
2604 if (! pending_assemble_externals_set->add (decl))
2605 pending_assemble_externals = tree_cons (NULL, decl,
2606 pending_assemble_externals);
2607 #endif
2608 }
2609
2610 /* Similar, for calling a library function FUN. */
2611
2612 void
2613 assemble_external_libcall (rtx fun)
2614 {
2615 /* Declare the library function name external when first used, if necessary. */
2616 if (! SYMBOL_REF_USED (fun))
2617 {
2618 SYMBOL_REF_USED (fun) = 1;
2619 targetm.asm_out.external_libcall (fun);
2620 }
2621 }
2622
2623 /* Assemble a label named NAME. */
2624
2625 void
2626 assemble_label (FILE *file, const char *name)
2627 {
2628 ASM_OUTPUT_LABEL (file, name);
2629 }
2630
2631 /* Set the symbol_referenced flag for ID. */
2632 void
2633 mark_referenced (tree id)
2634 {
2635 TREE_SYMBOL_REFERENCED (id) = 1;
2636 }
2637
2638 /* Set the symbol_referenced flag for DECL and notify callgraph. */
2639 void
2640 mark_decl_referenced (tree decl)
2641 {
2642 if (TREE_CODE (decl) == FUNCTION_DECL)
2643 {
2644 /* Extern inline functions don't become needed when referenced.
2645 If we know a method will be emitted in another TU and no new
2646 functions can be marked reachable, just use the external
2647 definition. */
2648 struct cgraph_node *node = cgraph_node::get_create (decl);
2649 if (!DECL_EXTERNAL (decl)
2650 && !node->definition)
2651 node->mark_force_output ();
2652 }
2653 else if (VAR_P (decl))
2654 {
2655 varpool_node *node = varpool_node::get_create (decl);
2656 /* The C++ frontend uses mark_decl_referenced to force COMDAT variables
2657 that might otherwise appear dead to be output. */
2658 node->force_output = true;
2659 }
2660 /* else do nothing - we can get various sorts of CST nodes here,
2661 which do not need to be marked. */
2662 }
2663
2664
2665 /* Output to FILE (an assembly file) a reference to NAME. If NAME
2666 starts with a *, the rest of NAME is output verbatim. Otherwise
2667 NAME is transformed in a target-specific way (usually by the
2668 addition of an underscore). */
2669
2670 void
2671 assemble_name_raw (FILE *file, const char *name)
2672 {
2673 if (name[0] == '*')
2674 fputs (&name[1], file);
2675 else
2676 ASM_OUTPUT_LABELREF (file, name);
2677 }
2678
2679 /* Return NAME that should actually be emitted, looking through
2680 transparent aliases. If NAME refers to an entity that is also
2681 represented as a tree (like a function or variable), mark the entity
2682 as referenced. */
2683 const char *
2684 assemble_name_resolve (const char *name)
2685 {
2686 const char *real_name = targetm.strip_name_encoding (name);
2687 tree id = maybe_get_identifier (real_name);
2688
2689 if (id)
2690 {
2691 tree id_orig = id;
2692
2693 mark_referenced (id);
2694 ultimate_transparent_alias_target (&id);
2695 if (id != id_orig)
2696 name = IDENTIFIER_POINTER (id);
2697 gcc_assert (! TREE_CHAIN (id));
2698 }
2699
2700 return name;
2701 }
2702
2703 /* Like assemble_name_raw, but should be used when NAME might refer to
2704 an entity that is also represented as a tree (like a function or
2705 variable). If NAME does refer to such an entity, that entity will
2706 be marked as referenced. */
2707
2708 void
2709 assemble_name (FILE *file, const char *name)
2710 {
2711 assemble_name_raw (file, assemble_name_resolve (name));
2712 }
2713
2714 /* Allocate SIZE bytes of writable static space with a gensym name
2715 and return an RTX to refer to its address. */
2716
2717 rtx
2718 assemble_static_space (unsigned HOST_WIDE_INT size)
2719 {
2720 char name[17];
2721 const char *namestring;
2722 rtx x;
2723
2724 ASM_GENERATE_INTERNAL_LABEL (name, "LF", const_labelno);
2725 ++const_labelno;
2726 namestring = ggc_strdup (name);
2727
2728 x = gen_rtx_SYMBOL_REF (Pmode, namestring);
2729 SYMBOL_REF_FLAGS (x) = SYMBOL_FLAG_LOCAL;
2730
2731 #ifdef ASM_OUTPUT_ALIGNED_DECL_LOCAL
2732 ASM_OUTPUT_ALIGNED_DECL_LOCAL (asm_out_file, NULL_TREE, name, size,
2733 BIGGEST_ALIGNMENT);
2734 #else
2735 #ifdef ASM_OUTPUT_ALIGNED_LOCAL
2736 ASM_OUTPUT_ALIGNED_LOCAL (asm_out_file, name, size, BIGGEST_ALIGNMENT);
2737 #else
2738 {
2739 /* Round size up to multiple of BIGGEST_ALIGNMENT bits
2740 so that each uninitialized object starts on such a boundary. */
2741 /* Variable `rounded' might or might not be used in ASM_OUTPUT_LOCAL. */
2742 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED
2743 = ((size + (BIGGEST_ALIGNMENT / BITS_PER_UNIT) - 1)
2744 / (BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2745 * (BIGGEST_ALIGNMENT / BITS_PER_UNIT));
2746 ASM_OUTPUT_LOCAL (asm_out_file, name, size, rounded);
2747 }
2748 #endif
2749 #endif
2750 return x;
2751 }
2752
2753 /* Assemble the static constant template for function entry trampolines.
2754 This is done at most once per compilation.
2755 Returns an RTX for the address of the template. */
2756
2757 static GTY(()) rtx initial_trampoline;
2758
2759 rtx
2760 assemble_trampoline_template (void)
2761 {
2762 char label[256];
2763 const char *name;
2764 int align;
2765 rtx symbol;
2766
2767 gcc_assert (targetm.asm_out.trampoline_template != NULL);
2768
2769 if (initial_trampoline)
2770 return initial_trampoline;
2771
2772 /* By default, put trampoline templates in read-only data section. */
2773
2774 #ifdef TRAMPOLINE_SECTION
2775 switch_to_section (TRAMPOLINE_SECTION);
2776 #else
2777 switch_to_section (readonly_data_section);
2778 #endif
2779
2780 /* Write the assembler code to define one. */
2781 align = floor_log2 (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
2782 if (align > 0)
2783 ASM_OUTPUT_ALIGN (asm_out_file, align);
2784
2785 targetm.asm_out.internal_label (asm_out_file, "LTRAMP", 0);
2786 targetm.asm_out.trampoline_template (asm_out_file);
2787
2788 /* Record the rtl to refer to it. */
2789 ASM_GENERATE_INTERNAL_LABEL (label, "LTRAMP", 0);
2790 name = ggc_strdup (label);
2791 symbol = gen_rtx_SYMBOL_REF (Pmode, name);
2792 SYMBOL_REF_FLAGS (symbol) = SYMBOL_FLAG_LOCAL;
2793
2794 initial_trampoline = gen_const_mem (BLKmode, symbol);
2795 set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
2796 set_mem_size (initial_trampoline, TRAMPOLINE_SIZE);
2797
2798 return initial_trampoline;
2799 }
2800 \f
2801 /* A and B are either alignments or offsets. Return the minimum alignment
2802 that may be assumed after adding the two together. */
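/* E.g. min_align (16, 24) == 8: 16 | 24 == 24, whose least significant
   set bit is 8, so adding a 24-byte offset to a 16-byte-aligned address
   leaves only 8-byte alignment guaranteed.  */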
2803
2804 static inline unsigned
2805 min_align (unsigned int a, unsigned int b)
2806 {
2807 return least_bit_hwi (a | b);
2808 }
2809
2810 /* Return the assembler directive for creating a given kind of integer
2811 object. SIZE is the number of bytes in the object and ALIGNED_P
2812 indicates whether it is known to be aligned. Return NULL if the
2813 assembly dialect has no such directive.
2814
2815 The returned string should be printed at the start of a new line and
2816 be followed immediately by the object's initial value. */
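/* E.g. integer_asm_op (4, 1) returns targetm.asm_out.aligned_op.si,
   which on typical ELF/GAS targets is "\t.long\t"; sizes with no
   matching directive yield NULL.  */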
2817
2818 const char *
2819 integer_asm_op (int size, int aligned_p)
2820 {
2821 struct asm_int_op *ops;
2822
2823 if (aligned_p)
2824 ops = &targetm.asm_out.aligned_op;
2825 else
2826 ops = &targetm.asm_out.unaligned_op;
2827
2828 switch (size)
2829 {
2830 case 1:
2831 return targetm.asm_out.byte_op;
2832 case 2:
2833 return ops->hi;
2834 case 3:
2835 return ops->psi;
2836 case 4:
2837 return ops->si;
2838 case 5:
2839 case 6:
2840 case 7:
2841 return ops->pdi;
2842 case 8:
2843 return ops->di;
2844 case 9:
2845 case 10:
2846 case 11:
2847 case 12:
2848 case 13:
2849 case 14:
2850 case 15:
2851 return ops->pti;
2852 case 16:
2853 return ops->ti;
2854 default:
2855 return NULL;
2856 }
2857 }
2858
2859 /* Use directive OP to assemble an integer object X. Print OP at the
2860 start of the line, followed immediately by the value of X. */
2861
2862 void
2863 assemble_integer_with_op (const char *op, rtx x)
2864 {
2865 fputs (op, asm_out_file);
2866 output_addr_const (asm_out_file, x);
2867 fputc ('\n', asm_out_file);
2868 }
2869
2870 /* The default implementation of the asm_out.integer target hook. */
2871
2872 bool
2873 default_assemble_integer (rtx x ATTRIBUTE_UNUSED,
2874 unsigned int size ATTRIBUTE_UNUSED,
2875 int aligned_p ATTRIBUTE_UNUSED)
2876 {
2877 const char *op = integer_asm_op (size, aligned_p);
2878 /* Avoid GAS bugs for large values. Specifically negative values whose
2879 absolute value fits in a bfd_vma, but not in a bfd_signed_vma. */
2880 if (size > UNITS_PER_WORD && size > POINTER_SIZE_UNITS)
2881 return false;
2882 return op && (assemble_integer_with_op (op, x), true);
2883 }
2884
2885 /* Assemble the integer constant X into an object of SIZE bytes. ALIGN is
2886 the alignment of the integer in bits. Return 1 if we were able to output
2887 the constant, otherwise 0. We must be able to output the constant,
2888 if FORCE is nonzero. */
2889
2890 bool
2891 assemble_integer (rtx x, unsigned int size, unsigned int align, int force)
2892 {
2893 int aligned_p;
2894
2895 aligned_p = (align >= MIN (size * BITS_PER_UNIT, BIGGEST_ALIGNMENT));
2896
2897 /* See if the target hook can handle this kind of object. */
2898 if (targetm.asm_out.integer (x, size, aligned_p))
2899 return true;
2900
2901 /* If the object is a multi-byte one, try splitting it up. Split
2902 it into words if it is multi-word, otherwise split it into bytes. */
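/* For example, an 8-byte value on a 32-bit target with no 8-byte
   directive is split into two 4-byte (UNITS_PER_WORD) pieces with
   simplify_subreg, and each piece is emitted recursively.  */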
2903 if (size > 1)
2904 {
2905 machine_mode omode, imode;
2906 unsigned int subalign;
2907 unsigned int subsize, i;
2908 enum mode_class mclass;
2909
2910 subsize = size > UNITS_PER_WORD? UNITS_PER_WORD : 1;
2911 subalign = MIN (align, subsize * BITS_PER_UNIT);
2912 if (GET_CODE (x) == CONST_FIXED)
2913 mclass = GET_MODE_CLASS (GET_MODE (x));
2914 else
2915 mclass = MODE_INT;
2916
2917 omode = mode_for_size (subsize * BITS_PER_UNIT, mclass, 0).require ();
2918 imode = mode_for_size (size * BITS_PER_UNIT, mclass, 0).require ();
2919
2920 for (i = 0; i < size; i += subsize)
2921 {
2922 rtx partial = simplify_subreg (omode, x, imode, i);
2923 if (!partial || !assemble_integer (partial, subsize, subalign, 0))
2924 break;
2925 }
2926 if (i == size)
2927 return true;
2928
2929 /* If we've printed some of it, but not all of it, there's no going
2930 back now. */
2931 gcc_assert (!i);
2932 }
2933
2934 gcc_assert (!force);
2935
2936 return false;
2937 }
2938 \f
2939 /* Assemble the floating-point constant D into an object of size MODE. ALIGN
2940 is the alignment of the constant in bits. If REVERSE is true, D is output
2941 in reverse storage order. */
2942
2943 void
2944 assemble_real (REAL_VALUE_TYPE d, scalar_float_mode mode, unsigned int align,
2945 bool reverse)
2946 {
2947 long data[4] = {0, 0, 0, 0};
2948 int bitsize, nelts, nunits, units_per;
2949 rtx elt;
2950
2951 /* This is hairy. We have a quantity of known size. real_to_target
2952 will put it into an array of *host* longs, 32 bits per element
2953 (even if long is more than 32 bits). We need to determine the
2954 number of array elements that are occupied (nelts) and the number
2955 of *target* min-addressable units that will be occupied in the
2956 object file (nunits). We cannot assume that 32 divides the
2957 mode's bitsize (size * BITS_PER_UNIT) evenly.
2958
2959 size * BITS_PER_UNIT is used here to make sure that padding bits
2960 (which might appear at either end of the value; real_to_target
2961 will include the padding bits in its output array) are included. */
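/* E.g. for a 64-bit IEEE double with BITS_PER_UNIT == 8: nunits == 8,
   bitsize == 64, nelts == 2 and units_per == 4, so two 4-byte words are
   emitted, the first at the requested alignment and the second at
   min_align (align, 32).  */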
2962
2963 nunits = GET_MODE_SIZE (mode);
2964 bitsize = nunits * BITS_PER_UNIT;
2965 nelts = CEIL (bitsize, 32);
2966 units_per = 32 / BITS_PER_UNIT;
2967
2968 real_to_target (data, &d, mode);
2969
2970 /* Put out the first word with the specified alignment. */
2971 unsigned int chunk_nunits = MIN (nunits, units_per);
2972 if (reverse)
2973 elt = flip_storage_order (SImode, gen_int_mode (data[nelts - 1], SImode));
2974 else
2975 elt = GEN_INT (sext_hwi (data[0], chunk_nunits * BITS_PER_UNIT));
2976 assemble_integer (elt, chunk_nunits, align, 1);
2977 nunits -= chunk_nunits;
2978
2979 /* Subsequent words need only 32-bit alignment. */
2980 align = min_align (align, 32);
2981
2982 for (int i = 1; i < nelts; i++)
2983 {
2984 chunk_nunits = MIN (nunits, units_per);
2985 if (reverse)
2986 elt = flip_storage_order (SImode,
2987 gen_int_mode (data[nelts - 1 - i], SImode));
2988 else
2989 elt = GEN_INT (sext_hwi (data[i], chunk_nunits * BITS_PER_UNIT));
2990 assemble_integer (elt, chunk_nunits, align, 1);
2991 nunits -= chunk_nunits;
2992 }
2993 }
2994 \f
2995 /* Given an expression EXP with a constant value,
2996 reduce it to the sum of an assembler symbol and an integer.
2997 Store them both in the structure *VALUE.
2998 EXP must be reducible. */
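/* For example, for &arr[3] where arr is a file-scope array of 4-byte
   ints, the walk below ends at arr's VAR_DECL, so value->base becomes
   arr's SYMBOL_REF and value->offset becomes 12.  */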
2999
3000 class addr_const {
3001 public:
3002 rtx base;
3003 poly_int64 offset;
3004 };
3005
3006 static void
3007 decode_addr_const (tree exp, class addr_const *value)
3008 {
3009 tree target = TREE_OPERAND (exp, 0);
3010 poly_int64 offset = 0;
3011 rtx x;
3012
3013 while (1)
3014 {
3015 poly_int64 bytepos;
3016 if (TREE_CODE (target) == COMPONENT_REF
3017 && poly_int_tree_p (byte_position (TREE_OPERAND (target, 1)),
3018 &bytepos))
3019 {
3020 offset += bytepos;
3021 target = TREE_OPERAND (target, 0);
3022 }
3023 else if (TREE_CODE (target) == ARRAY_REF
3024 || TREE_CODE (target) == ARRAY_RANGE_REF)
3025 {
3026 /* Truncate big offset. */
3027 offset
3028 += (TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (target)))
3029 * wi::to_poly_widest (TREE_OPERAND (target, 1)).force_shwi ());
3030 target = TREE_OPERAND (target, 0);
3031 }
3032 else if (TREE_CODE (target) == MEM_REF
3033 && TREE_CODE (TREE_OPERAND (target, 0)) == ADDR_EXPR)
3034 {
3035 offset += mem_ref_offset (target).force_shwi ();
3036 target = TREE_OPERAND (TREE_OPERAND (target, 0), 0);
3037 }
3038 else if (TREE_CODE (target) == INDIRECT_REF
3039 && TREE_CODE (TREE_OPERAND (target, 0)) == NOP_EXPR
3040 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (target, 0), 0))
3041 == ADDR_EXPR)
3042 target = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (target, 0), 0), 0);
3043 else
3044 break;
3045 }
3046
3047 switch (TREE_CODE (target))
3048 {
3049 case VAR_DECL:
3050 case FUNCTION_DECL:
3051 x = DECL_RTL (target);
3052 break;
3053
3054 case LABEL_DECL:
3055 x = gen_rtx_MEM (FUNCTION_MODE,
3056 gen_rtx_LABEL_REF (Pmode, force_label_rtx (target)));
3057 break;
3058
3059 case REAL_CST:
3060 case FIXED_CST:
3061 case STRING_CST:
3062 case COMPLEX_CST:
3063 case CONSTRUCTOR:
3064 case INTEGER_CST:
3065 x = lookup_constant_def (target);
3066 /* Should have been added by output_addressed_constants. */
3067 gcc_assert (x);
3068 break;
3069
3070 case INDIRECT_REF:
3071 /* This deals with absolute addresses. */
3072 offset += tree_to_shwi (TREE_OPERAND (target, 0));
3073 x = gen_rtx_MEM (QImode,
3074 gen_rtx_SYMBOL_REF (Pmode, "origin of addresses"));
3075 break;
3076
3077 case COMPOUND_LITERAL_EXPR:
3078 gcc_assert (COMPOUND_LITERAL_EXPR_DECL (target));
3079 x = DECL_RTL (COMPOUND_LITERAL_EXPR_DECL (target));
3080 break;
3081
3082 default:
3083 gcc_unreachable ();
3084 }
3085
3086 gcc_assert (MEM_P (x));
3087 x = XEXP (x, 0);
3088
3089 value->base = x;
3090 value->offset = offset;
3091 }
3092 \f
3093 static GTY(()) hash_table<tree_descriptor_hasher> *const_desc_htab;
3094
3095 static void maybe_output_constant_def_contents (struct constant_descriptor_tree *, int);
3096
3097 /* Constant pool accessor function. */
3098
3099 hash_table<tree_descriptor_hasher> *
3100 constant_pool_htab (void)
3101 {
3102 return const_desc_htab;
3103 }
3104
3105 /* Compute a hash code for a constant expression. */
3106
3107 hashval_t
3108 tree_descriptor_hasher::hash (constant_descriptor_tree *ptr)
3109 {
3110 return ptr->hash;
3111 }
3112
3113 static hashval_t
3114 const_hash_1 (const tree exp)
3115 {
3116 const char *p;
3117 hashval_t hi;
3118 int len, i;
3119 enum tree_code code = TREE_CODE (exp);
3120
3121 /* Either set P and LEN to the address and len of something to hash and
3122 exit the switch or return a value. */
3123
3124 switch (code)
3125 {
3126 case INTEGER_CST:
3127 p = (char *) &TREE_INT_CST_ELT (exp, 0);
3128 len = TREE_INT_CST_NUNITS (exp) * sizeof (HOST_WIDE_INT);
3129 break;
3130
3131 case REAL_CST:
3132 return real_hash (TREE_REAL_CST_PTR (exp));
3133
3134 case FIXED_CST:
3135 return fixed_hash (TREE_FIXED_CST_PTR (exp));
3136
3137 case STRING_CST:
3138 p = TREE_STRING_POINTER (exp);
3139 len = TREE_STRING_LENGTH (exp);
3140 break;
3141
3142 case COMPLEX_CST:
3143 return (const_hash_1 (TREE_REALPART (exp)) * 5
3144 + const_hash_1 (TREE_IMAGPART (exp)));
3145
3146 case VECTOR_CST:
3147 {
3148 hi = 7 + VECTOR_CST_NPATTERNS (exp);
3149 hi = hi * 563 + VECTOR_CST_NELTS_PER_PATTERN (exp);
3150 unsigned int count = vector_cst_encoded_nelts (exp);
3151 for (unsigned int i = 0; i < count; ++i)
3152 hi = hi * 563 + const_hash_1 (VECTOR_CST_ENCODED_ELT (exp, i));
3153 return hi;
3154 }
3155
3156 case CONSTRUCTOR:
3157 {
3158 unsigned HOST_WIDE_INT idx;
3159 tree value;
3160
3161 hi = 5 + int_size_in_bytes (TREE_TYPE (exp));
3162
3163 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
3164 if (value)
3165 hi = hi * 603 + const_hash_1 (value);
3166
3167 return hi;
3168 }
3169
3170 case ADDR_EXPR:
3171 if (CONSTANT_CLASS_P (TREE_OPERAND (exp, 0)))
3172 return const_hash_1 (TREE_OPERAND (exp, 0));
3173
3174 /* Fallthru. */
3175 case FDESC_EXPR:
3176 {
3177 class addr_const value;
3178
3179 decode_addr_const (exp, &value);
3180 switch (GET_CODE (value.base))
3181 {
3182 case SYMBOL_REF:
3183 /* Don't hash the address of the SYMBOL_REF;
3184 only use the offset and the symbol name. */
3185 hi = value.offset.coeffs[0];
3186 p = XSTR (value.base, 0);
3187 for (i = 0; p[i] != 0; i++)
3188 hi = ((hi * 613) + (unsigned) (p[i]));
3189 break;
3190
3191 case LABEL_REF:
3192 hi = (value.offset.coeffs[0]
3193 + CODE_LABEL_NUMBER (label_ref_label (value.base)) * 13);
3194 break;
3195
3196 default:
3197 gcc_unreachable ();
3198 }
3199 }
3200 return hi;
3201
3202 case PLUS_EXPR:
3203 case POINTER_PLUS_EXPR:
3204 case MINUS_EXPR:
3205 return (const_hash_1 (TREE_OPERAND (exp, 0)) * 9
3206 + const_hash_1 (TREE_OPERAND (exp, 1)));
3207
3208 CASE_CONVERT:
3209 return const_hash_1 (TREE_OPERAND (exp, 0)) * 7 + 2;
3210
3211 default:
3212 /* A language specific constant. Just hash the code. */
3213 return code;
3214 }
3215
3216 /* Compute hashing function. */
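/* Worked example: with P pointing at the two bytes { 'a', 0 } and
   LEN == 2, the loop computes ((2 * 613) + 'a') * 613 + 0 == 810999.  */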
3217 hi = len;
3218 for (i = 0; i < len; i++)
3219 hi = ((hi * 613) + (unsigned) (p[i]));
3220
3221 return hi;
3222 }
3223
3224 /* Wrapper of compare_constant, for the htab interface. */
3225 bool
3226 tree_descriptor_hasher::equal (constant_descriptor_tree *c1,
3227 constant_descriptor_tree *c2)
3228 {
3229 if (c1->hash != c2->hash)
3230 return 0;
3231 return compare_constant (c1->value, c2->value);
3232 }
3233
3234 /* Compare t1 and t2, and return 1 only if they are known to result in
3235 the same bit pattern on output. */
3236
3237 static int
3238 compare_constant (const tree t1, const tree t2)
3239 {
3240 enum tree_code typecode;
3241
3242 if (t1 == NULL_TREE)
3243 return t2 == NULL_TREE;
3244 if (t2 == NULL_TREE)
3245 return 0;
3246
3247 if (TREE_CODE (t1) != TREE_CODE (t2))
3248 return 0;
3249
3250 switch (TREE_CODE (t1))
3251 {
3252 case INTEGER_CST:
3253 /* Integer constants are the same only if they have the same width of type. */
3254 if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
3255 return 0;
3256 if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2)))
3257 return 0;
3258 return tree_int_cst_equal (t1, t2);
3259
3260 case REAL_CST:
3261 /* Real constants are the same only if they have the same width of type. In
3262 addition to the same width, we need to check whether the modes are the
3263 same. There might be two floating point modes that are the same size
3264 but have different representations, such as PowerPC, which has 2
3265 different 128-bit floating point types (IBM extended double and IEEE
3266 128-bit floating point). */
3267 if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
3268 return 0;
3269 if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2)))
3270 return 0;
3271 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
3272
3273 case FIXED_CST:
3274 /* Fixed constants are the same only if they have the same width of type. */
3275 if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
3276 return 0;
3277
3278 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
3279
3280 case STRING_CST:
3281 if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2))
3282 || int_size_in_bytes (TREE_TYPE (t1))
3283 != int_size_in_bytes (TREE_TYPE (t2)))
3284 return 0;
3285
3286 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
3287 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
3288 TREE_STRING_LENGTH (t1)));
3289
3290 case COMPLEX_CST:
3291 return (compare_constant (TREE_REALPART (t1), TREE_REALPART (t2))
3292 && compare_constant (TREE_IMAGPART (t1), TREE_IMAGPART (t2)));
3293
3294 case VECTOR_CST:
3295 {
3296 if (VECTOR_CST_NPATTERNS (t1)
3297 != VECTOR_CST_NPATTERNS (t2))
3298 return 0;
3299
3300 if (VECTOR_CST_NELTS_PER_PATTERN (t1)
3301 != VECTOR_CST_NELTS_PER_PATTERN (t2))
3302 return 0;
3303
3304 unsigned int count = vector_cst_encoded_nelts (t1);
3305 for (unsigned int i = 0; i < count; ++i)
3306 if (!compare_constant (VECTOR_CST_ENCODED_ELT (t1, i),
3307 VECTOR_CST_ENCODED_ELT (t2, i)))
3308 return 0;
3309
3310 return 1;
3311 }
3312
3313 case CONSTRUCTOR:
3314 {
3315 vec<constructor_elt, va_gc> *v1, *v2;
3316 unsigned HOST_WIDE_INT idx;
3317
3318 typecode = TREE_CODE (TREE_TYPE (t1));
3319 if (typecode != TREE_CODE (TREE_TYPE (t2)))
3320 return 0;
3321
3322 if (typecode == ARRAY_TYPE)
3323 {
3324 HOST_WIDE_INT size_1 = int_size_in_bytes (TREE_TYPE (t1));
3325 /* For arrays, check that mode, size and storage order match. */
3326 if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2))
3327 || size_1 == -1
3328 || size_1 != int_size_in_bytes (TREE_TYPE (t2))
3329 || TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (t1))
3330 != TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (t2)))
3331 return 0;
3332 }
3333 else
3334 {
3335 /* For record and union constructors, require exact type
3336 equality. */
3337 if (TREE_TYPE (t1) != TREE_TYPE (t2))
3338 return 0;
3339 }
3340
3341 v1 = CONSTRUCTOR_ELTS (t1);
3342 v2 = CONSTRUCTOR_ELTS (t2);
3343 if (vec_safe_length (v1) != vec_safe_length (v2))
3344 return 0;
3345
3346 for (idx = 0; idx < vec_safe_length (v1); ++idx)
3347 {
3348 constructor_elt *c1 = &(*v1)[idx];
3349 constructor_elt *c2 = &(*v2)[idx];
3350
3351 /* Check that each value is the same... */
3352 if (!compare_constant (c1->value, c2->value))
3353 return 0;
3354 /* ... and that they apply to the same fields! */
3355 if (typecode == ARRAY_TYPE)
3356 {
3357 if (!compare_constant (c1->index, c2->index))
3358 return 0;
3359 }
3360 else
3361 {
3362 if (c1->index != c2->index)
3363 return 0;
3364 }
3365 }
3366
3367 return 1;
3368 }
3369
3370 case ADDR_EXPR:
3371 case FDESC_EXPR:
3372 {
3373 class addr_const value1, value2;
3374 enum rtx_code code;
3375 int ret;
3376
3377 decode_addr_const (t1, &value1);
3378 decode_addr_const (t2, &value2);
3379
3380 if (maybe_ne (value1.offset, value2.offset))
3381 return 0;
3382
3383 code = GET_CODE (value1.base);
3384 if (code != GET_CODE (value2.base))
3385 return 0;
3386
3387 switch (code)
3388 {
3389 case SYMBOL_REF:
3390 ret = (strcmp (XSTR (value1.base, 0), XSTR (value2.base, 0)) == 0);
3391 break;
3392
3393 case LABEL_REF:
3394 ret = (CODE_LABEL_NUMBER (label_ref_label (value1.base))
3395 == CODE_LABEL_NUMBER (label_ref_label (value2.base)));
3396 break;
3397
3398 default:
3399 gcc_unreachable ();
3400 }
3401 return ret;
3402 }
3403
3404 case PLUS_EXPR:
3405 case POINTER_PLUS_EXPR:
3406 case MINUS_EXPR:
3407 case RANGE_EXPR:
3408 return (compare_constant (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0))
3409 && compare_constant (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1)));
3410
3411 CASE_CONVERT:
3412 case VIEW_CONVERT_EXPR:
3413 return compare_constant (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
3414
3415 default:
3416 return 0;
3417 }
3418 }
3419 \f
3420 /* Return the section into which constant EXP should be placed. */
3421
3422 static section *
3423 get_constant_section (tree exp, unsigned int align)
3424 {
3425 return targetm.asm_out.select_section (exp,
3426 compute_reloc_for_constant (exp),
3427 align);
3428 }
3429
3430 /* Return the size of constant EXP in bytes. */
3431
3432 static HOST_WIDE_INT
3433 get_constant_size (tree exp)
3434 {
3435 HOST_WIDE_INT size;
3436
3437 size = int_size_in_bytes (TREE_TYPE (exp));
3438 gcc_checking_assert (size >= 0);
3439 gcc_checking_assert (TREE_CODE (exp) != STRING_CST
3440 || size >= TREE_STRING_LENGTH (exp));
3441 return size;
3442 }
3443
3444 /* Subroutine of output_constant_def:
3445 No constant equal to EXP is known to have been output.
3446 Make a constant descriptor to enter EXP in the hash table.
3447 Assign the label number and construct RTL to refer to the
3448 constant's location in memory.
3449 Caller is responsible for updating the hash table. */
3450
3451 static struct constant_descriptor_tree *
3452 build_constant_desc (tree exp)
3453 {
3454 struct constant_descriptor_tree *desc;
3455 rtx symbol, rtl;
3456 char label[256];
3457 int labelno;
3458 tree decl;
3459
3460 desc = ggc_alloc<constant_descriptor_tree> ();
3461 desc->value = exp;
3462
3463 /* Create a string containing the label name, in LABEL. */
3464 labelno = const_labelno++;
3465 ASM_GENERATE_INTERNAL_LABEL (label, "LC", labelno);
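/* On typical ELF targets this produces names such as "*.LC0", "*.LC1",
   ...; the leading '*' tells assemble_name_raw to emit the rest of the
   name verbatim.  */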
3466
3467 /* Construct the VAR_DECL associated with the constant. */
3468 decl = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (label),
3469 TREE_TYPE (exp));
3470 DECL_ARTIFICIAL (decl) = 1;
3471 DECL_IGNORED_P (decl) = 1;
3472 TREE_READONLY (decl) = 1;
3473 TREE_STATIC (decl) = 1;
3474 TREE_ADDRESSABLE (decl) = 1;
3475 /* We don't set the RTL yet as this would cause varpool to assume that the
3476 variable is referenced. Moreover, it would just be dropped in LTO mode.
3477 Instead we set the flag that will be recognized in make_decl_rtl. */
3478 DECL_IN_CONSTANT_POOL (decl) = 1;
3479 DECL_INITIAL (decl) = desc->value;
3480 /* ??? targetm.constant_alignment hasn't been updated for vector types on
3481 most architectures so use DATA_ALIGNMENT as well, except for strings. */
3482 if (TREE_CODE (exp) == STRING_CST)
3483 SET_DECL_ALIGN (decl, targetm.constant_alignment (exp, DECL_ALIGN (decl)));
3484 else
3485 {
3486 align_variable (decl, 0);
3487 if (DECL_ALIGN (decl) < GET_MODE_ALIGNMENT (DECL_MODE (decl))
3488 && ((optab_handler (movmisalign_optab, DECL_MODE (decl))
3489 != CODE_FOR_nothing)
3490 || targetm.slow_unaligned_access (DECL_MODE (decl),
3491 DECL_ALIGN (decl))))
3492 SET_DECL_ALIGN (decl, GET_MODE_ALIGNMENT (DECL_MODE (decl)));
3493 }
3494
3495 /* Now construct the SYMBOL_REF and the MEM. */
3496 if (use_object_blocks_p ())
3497 {
3498 int align = (TREE_CODE (decl) == CONST_DECL
3499 || (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
3500 ? DECL_ALIGN (decl)
3501 : symtab_node::get (decl)->definition_alignment ());
3502 section *sect = get_constant_section (exp, align);
3503 symbol = create_block_symbol (ggc_strdup (label),
3504 get_block_for_section (sect), -1);
3505 }
3506 else
3507 symbol = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (label));
3508 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LOCAL;
3509 SET_SYMBOL_REF_DECL (symbol, decl);
3510 TREE_CONSTANT_POOL_ADDRESS_P (symbol) = 1;
3511
3512 rtl = gen_const_mem (TYPE_MODE (TREE_TYPE (exp)), symbol);
3513 set_mem_alias_set (rtl, 0);
3514
3515 /* Putting EXP into the literal pool might have imposed a different
3516 alignment which should be visible in the RTX as well. */
3517 set_mem_align (rtl, DECL_ALIGN (decl));
3518
3519 /* We cannot share RTX'es in pool entries.
3520 Mark this piece of RTL as required for unsharing. */
3521 RTX_FLAG (rtl, used) = 1;
3522
3523 /* Set flags or add text to the name to record information, such as
3524 that it is a local symbol. If the name is changed, the macro
3525 ASM_OUTPUT_LABELREF will have to know how to strip this
3526 information. This call might invalidate our local variable
3527 SYMBOL; we can't use it afterward. */
3528 targetm.encode_section_info (exp, rtl, true);
3529
3530 desc->rtl = rtl;
3531
3532 return desc;
3533 }
3534
3535 /* Subroutine of output_constant_def and tree_output_constant_def:
3536 Add a constant to the hash table that tracks which constants
3537 already have labels. */
3538
3539 static constant_descriptor_tree *
3540 add_constant_to_table (tree exp, int defer)
3541 {
3542 /* The hash table methods may call output_constant_def for addressed
3543 constants, so handle them first. */
3544 output_addressed_constants (exp, defer);
3545
3546 /* Sanity check to catch recursive insertion. */
3547 static bool inserting;
3548 gcc_assert (!inserting);
3549 inserting = true;
3550
3551 /* Look up EXP in the table of constant descriptors. If we didn't
3552 find it, create a new one. */
3553 struct constant_descriptor_tree key;
3554 key.value = exp;
3555 key.hash = const_hash_1 (exp);
3556 constant_descriptor_tree **loc
3557 = const_desc_htab->find_slot_with_hash (&key, key.hash, INSERT);
3558
3559 inserting = false;
3560
3561 struct constant_descriptor_tree *desc = *loc;
3562 if (!desc)
3563 {
3564 desc = build_constant_desc (exp);
3565 desc->hash = key.hash;
3566 *loc = desc;
3567 }
3568
3569 return desc;
3570 }
3571
3572 /* Return an rtx representing a reference to constant data in memory
3573 for the constant expression EXP.
3574
3575 If assembler code for such a constant has already been output,
3576 return an rtx to refer to it.
3577 Otherwise, output such a constant in memory
3578 and generate an rtx for it.
3579
3580 If DEFER is nonzero, this constant can be deferred and output only
3581 if referenced in the function after all optimizations.
3582
3583 `const_desc_table' records which constants already have label strings. */
3584
3585 rtx
3586 output_constant_def (tree exp, int defer)
3587 {
3588 struct constant_descriptor_tree *desc = add_constant_to_table (exp, defer);
3589 maybe_output_constant_def_contents (desc, defer);
3590 return desc->rtl;
3591 }
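/* Usage sketch (hypothetical caller during expansion, not code from this
   file):

       rtx ref = output_constant_def (exp, 1);
       ... use REF, a MEM whose address is the pool label ...

   With DEFER nonzero the assembly for EXP is only written if an insn
   still references it after optimization; see mark_constants_in_pattern
   below.  */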
3592
3593 /* Subroutine of output_constant_def: Decide whether or not we need to
3594 output the constant DESC now, and if so, do it. */
3595 static void
3596 maybe_output_constant_def_contents (struct constant_descriptor_tree *desc,
3597 int defer)
3598 {
3599 rtx symbol = XEXP (desc->rtl, 0);
3600 tree exp = desc->value;
3601
3602 if (flag_syntax_only)
3603 return;
3604
3605 if (TREE_ASM_WRITTEN (exp))
3606 /* Already output; don't do it again. */
3607 return;
3608
3609 /* We can always defer constants as long as the context allows
3610 doing so. */
3611 if (defer)
3612 {
3613 /* Increment n_deferred_constants if it exists. It needs to be at
3614 least as large as the number of constants actually referred to
3615 by the function. If it's too small we'll stop looking too early
3616 and fail to emit constants; if it's too large we'll only look
3617 through the entire function when we could have stopped earlier. */
3618 if (cfun)
3619 n_deferred_constants++;
3620 return;
3621 }
3622
3623 output_constant_def_contents (symbol);
3624 }
3625
3626 /* Subroutine of output_constant_def_contents. Output the definition
3627 of constant EXP, which is pointed to by label LABEL. ALIGN is the
3628 constant's alignment in bits; MERGE_STRINGS is true when emitting into a mergeable string section. */
3629
3630 static void
3631 assemble_constant_contents (tree exp, const char *label, unsigned int align,
3632 bool merge_strings)
3633 {
3634 HOST_WIDE_INT size;
3635
3636 size = get_constant_size (exp);
3637
3638 /* Do any machine/system dependent processing of the constant. */
3639 targetm.asm_out.declare_constant_name (asm_out_file, label, exp, size);
3640
3641 /* Output the value of EXP. */
3642 output_constant (exp, size, align, false, merge_strings);
3643
3644 targetm.asm_out.decl_end ();
3645 }
3646
3647 /* We must output the constant data referred to by SYMBOL; do so. */
3648
3649 static void
3650 output_constant_def_contents (rtx symbol)
3651 {
3652 tree decl = SYMBOL_REF_DECL (symbol);
3653 tree exp = DECL_INITIAL (decl);
3654 bool asan_protected = false;
3655
3656 /* Make sure any other constants whose addresses appear in EXP
3657 are assigned label numbers. */
3658 output_addressed_constants (exp, 0);
3659
3660 /* We are no longer deferring this constant. */
3661 TREE_ASM_WRITTEN (decl) = TREE_ASM_WRITTEN (exp) = 1;
3662
3663 if ((flag_sanitize & SANITIZE_ADDRESS)
3664 && TREE_CODE (exp) == STRING_CST
3665 && asan_protect_global (exp))
3666 {
3667 asan_protected = true;
3668 SET_DECL_ALIGN (decl, MAX (DECL_ALIGN (decl),
3669 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT));
3670 }
3671
3672 /* If the constant is part of an object block, make sure that the
3673 decl has been positioned within its block, but do not write out
3674 its definition yet. output_object_blocks will do that later. */
3675 if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol) && SYMBOL_REF_BLOCK (symbol))
3676 place_block_symbol (symbol);
3677 else
3678 {
3679 int align = (TREE_CODE (decl) == CONST_DECL
3680 || (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
3681 ? DECL_ALIGN (decl)
3682 : symtab_node::get (decl)->definition_alignment ());
3683 section *sect = get_constant_section (exp, align);
3684 switch_to_section (sect);
3685 if (align > BITS_PER_UNIT)
3686 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
3687 assemble_constant_contents (exp, XSTR (symbol, 0), align,
3688 (sect->common.flags & SECTION_MERGE)
3689 && (sect->common.flags & SECTION_STRINGS));
3690 if (asan_protected)
3691 {
3692 HOST_WIDE_INT size = get_constant_size (exp);
3693 assemble_zeros (asan_red_zone_size (size));
3694 }
3695 }
3696 }
3697
3698 /* Look up EXP in the table of constant descriptors. Return the rtl
3699 if it has been emitted, else null. */
3700
3701 rtx
3702 lookup_constant_def (tree exp)
3703 {
3704 struct constant_descriptor_tree key;
3705
3706 key.value = exp;
3707 key.hash = const_hash_1 (exp);
3708 constant_descriptor_tree *desc
3709 = const_desc_htab->find_with_hash (&key, key.hash);
3710
3711 return (desc ? desc->rtl : NULL_RTX);
3712 }
3713
3714 /* Return a tree representing a reference to constant data in memory
3715 for the constant expression EXP.
3716
3717 This is the counterpart of output_constant_def at the Tree level. */
3718
3719 tree
3720 tree_output_constant_def (tree exp)
3721 {
3722 struct constant_descriptor_tree *desc = add_constant_to_table (exp, 1);
3723 tree decl = SYMBOL_REF_DECL (XEXP (desc->rtl, 0));
3724 varpool_node::finalize_decl (decl);
3725 return decl;
3726 }
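/* Usage sketch (hypothetical GIMPLE-level caller, not code from this
   file): a pass that needs a static object holding a constant can do

       tree decl = tree_output_constant_def (ctor);
       tree addr = build_fold_addr_expr (decl);

   and then use ADDR like the address of any other static variable; the
   decl has already been finalized through the varpool above.  */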
3727 \f
3728 class GTY((chain_next ("%h.next"), for_user)) constant_descriptor_rtx {
3729 public:
3730 class constant_descriptor_rtx *next;
3731 rtx mem;
3732 rtx sym;
3733 rtx constant;
3734 HOST_WIDE_INT offset;
3735 hashval_t hash;
3736 fixed_size_mode mode;
3737 unsigned int align;
3738 int labelno;
3739 int mark;
3740 };
3741
3742 struct const_rtx_desc_hasher : ggc_ptr_hash<constant_descriptor_rtx>
3743 {
3744 static hashval_t hash (constant_descriptor_rtx *);
3745 static bool equal (constant_descriptor_rtx *, constant_descriptor_rtx *);
3746 };
3747
3748 /* Used in the hash tables to avoid outputting the same constant
3749 twice. Unlike 'struct constant_descriptor_tree', RTX constants
3750 are output once per function, not once per file. */
3751 /* ??? Only a few targets need per-function constant pools. Most
3752 can use one per-file pool. Should add a targetm bit to tell the
3753 difference. */
3754
3755 struct GTY(()) rtx_constant_pool {
3756 /* Pointers to first and last constant in pool, as ordered by offset. */
3757 class constant_descriptor_rtx *first;
3758 class constant_descriptor_rtx *last;
3759
3760 /* Hash facility for making memory-constants from constant rtl-expressions.
3761 It is used on RISC machines where immediate integer arguments and
3762 constant addresses are restricted so that such constants must be stored
3763 in memory. */
3764 hash_table<const_rtx_desc_hasher> *const_rtx_htab;
3765
3766 /* Current offset in constant pool (does not include any
3767 machine-specific header). */
3768 HOST_WIDE_INT offset;
3769 };
3770
3771 /* Hash and compare functions for const_rtx_htab. */
3772
3773 hashval_t
3774 const_rtx_desc_hasher::hash (constant_descriptor_rtx *desc)
3775 {
3776 return desc->hash;
3777 }
3778
3779 bool
3780 const_rtx_desc_hasher::equal (constant_descriptor_rtx *x,
3781 constant_descriptor_rtx *y)
3782 {
3783 if (x->mode != y->mode)
3784 return 0;
3785 return rtx_equal_p (x->constant, y->constant);
3786 }
3787
3788 /* Hash one component of a constant. */
3789
3790 static hashval_t
3791 const_rtx_hash_1 (const_rtx x)
3792 {
3793 unsigned HOST_WIDE_INT hwi;
3794 machine_mode mode;
3795 enum rtx_code code;
3796 hashval_t h;
3797 int i;
3798
3799 code = GET_CODE (x);
3800 mode = GET_MODE (x);
3801 h = (hashval_t) code * 1048573 + mode;
3802
3803 switch (code)
3804 {
3805 case CONST_INT:
3806 hwi = INTVAL (x);
3807
3808 fold_hwi:
3809 {
3810 int shift = sizeof (hashval_t) * CHAR_BIT;
3811 const int n = sizeof (HOST_WIDE_INT) / sizeof (hashval_t);
3812
3813 h ^= (hashval_t) hwi;
3814 for (i = 1; i < n; ++i)
3815 {
3816 hwi >>= shift;
3817 h ^= (hashval_t) hwi;
3818 }
3819 }
3820 break;
3821
3822 case CONST_WIDE_INT:
3823 hwi = 0;
3824 {
3825 for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
3826 hwi ^= CONST_WIDE_INT_ELT (x, i);
3827 goto fold_hwi;
3828 }
3829
3830 case CONST_DOUBLE:
3831 if (TARGET_SUPPORTS_WIDE_INT == 0 && mode == VOIDmode)
3832 {
3833 hwi = CONST_DOUBLE_LOW (x) ^ CONST_DOUBLE_HIGH (x);
3834 goto fold_hwi;
3835 }
3836 else
3837 h ^= real_hash (CONST_DOUBLE_REAL_VALUE (x));
3838 break;
3839
3840 case CONST_FIXED:
3841 h ^= fixed_hash (CONST_FIXED_VALUE (x));
3842 break;
3843
3844 case SYMBOL_REF:
3845 h ^= htab_hash_string (XSTR (x, 0));
3846 break;
3847
3848 case LABEL_REF:
3849 h = h * 251 + CODE_LABEL_NUMBER (label_ref_label (x));
3850 break;
3851
3852 case UNSPEC:
3853 case UNSPEC_VOLATILE:
3854 h = h * 251 + XINT (x, 1);
3855 break;
3856
3857 default:
3858 break;
3859 }
3860
3861 return h;
3862 }
3863
3864 /* Compute a hash value for X, which should be a constant. */
3865
3866 static hashval_t
3867 const_rtx_hash (rtx x)
3868 {
3869 hashval_t h = 0;
3870 subrtx_iterator::array_type array;
3871 FOR_EACH_SUBRTX (iter, array, x, ALL)
3872 h = h * 509 + const_rtx_hash_1 (*iter);
3873 return h;
3874 }
3875
3876 \f
3877 /* Create and return a new rtx constant pool. */
3878
3879 static struct rtx_constant_pool *
3880 create_constant_pool (void)
3881 {
3882 struct rtx_constant_pool *pool;
3883
3884 pool = ggc_alloc<rtx_constant_pool> ();
3885 pool->const_rtx_htab = hash_table<const_rtx_desc_hasher>::create_ggc (31);
3886 pool->first = NULL;
3887 pool->last = NULL;
3888 pool->offset = 0;
3889 return pool;
3890 }
3891
3892 /* Initialize constant pool hashing for a new function. */
3893
3894 void
3895 init_varasm_status (void)
3896 {
3897 crtl->varasm.pool = create_constant_pool ();
3898 crtl->varasm.deferred_constants = 0;
3899 }
3900 \f
3901 /* Given a MINUS expression, simplify it if both sides
3902 include the same symbol. */
3903
3904 rtx
3905 simplify_subtraction (rtx x)
3906 {
3907 rtx r = simplify_rtx (x);
3908 return r ? r : x;
3909 }
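/* Example of the intended use (values illustrative): an expression such
   as

       (minus (symbol_ref "x") (symbol_ref "x"))

   is folded by simplify_rtx to (const_int 0), which is what the wrapper
   returns; when no simplification applies, the original MINUS is
   returned unchanged.  */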
3910 \f
3911 /* Given a constant rtx X, make (or find) a memory constant for its value
3912 and return a MEM rtx to refer to it in memory. IN_MODE is the mode
3913 of X. */
3914
3915 rtx
3916 force_const_mem (machine_mode in_mode, rtx x)
3917 {
3918 class constant_descriptor_rtx *desc, tmp;
3919 struct rtx_constant_pool *pool;
3920 char label[256];
3921 rtx def, symbol;
3922 hashval_t hash;
3923 unsigned int align;
3924 constant_descriptor_rtx **slot;
3925 fixed_size_mode mode;
3926
3927 /* We can't force variable-sized objects to memory. */
3928 if (!is_a <fixed_size_mode> (in_mode, &mode))
3929 return NULL_RTX;
3930
3931 /* If we're not allowed to drop X into the constant pool, don't. */
3932 if (targetm.cannot_force_const_mem (mode, x))
3933 return NULL_RTX;
3934
3935 /* Record that this function has used a constant pool entry. */
3936 crtl->uses_const_pool = 1;
3937
3938 /* Decide which pool to use. */
3939 pool = (targetm.use_blocks_for_constant_p (mode, x)
3940 ? shared_constant_pool
3941 : crtl->varasm.pool);
3942
3943 /* Lookup the value in the hashtable. */
3944 tmp.constant = x;
3945 tmp.mode = mode;
3946 hash = const_rtx_hash (x);
3947 slot = pool->const_rtx_htab->find_slot_with_hash (&tmp, hash, INSERT);
3948 desc = *slot;
3949
3950 /* If the constant was already present, return its memory. */
3951 if (desc)
3952 return copy_rtx (desc->mem);
3953
3954 /* Otherwise, create a new descriptor. */
3955 desc = ggc_alloc<constant_descriptor_rtx> ();
3956 *slot = desc;
3957
3958 /* Align the location counter as required by EXP's data type. */
3959 machine_mode align_mode = (mode == VOIDmode ? word_mode : mode);
3960 align = targetm.static_rtx_alignment (align_mode);
3961
3962 pool->offset += (align / BITS_PER_UNIT) - 1;
3963 pool->offset &= ~ ((align / BITS_PER_UNIT) - 1);
3964
3965 desc->next = NULL;
3966 desc->constant = copy_rtx (tmp.constant);
3967 desc->offset = pool->offset;
3968 desc->hash = hash;
3969 desc->mode = mode;
3970 desc->align = align;
3971 desc->labelno = const_labelno;
3972 desc->mark = 0;
3973
3974 pool->offset += GET_MODE_SIZE (mode);
3975 if (pool->last)
3976 pool->last->next = desc;
3977 else
3978 pool->first = pool->last = desc;
3979 pool->last = desc;
3980
3981 /* Create a string containing the label name, in LABEL. */
3982 ASM_GENERATE_INTERNAL_LABEL (label, "LC", const_labelno);
3983 ++const_labelno;
3984
3985 /* Construct the SYMBOL_REF. Make sure to mark it as belonging to
3986 the constants pool. */
3987 if (use_object_blocks_p () && targetm.use_blocks_for_constant_p (mode, x))
3988 {
3989 section *sect = targetm.asm_out.select_rtx_section (mode, x, align);
3990 symbol = create_block_symbol (ggc_strdup (label),
3991 get_block_for_section (sect), -1);
3992 }
3993 else
3994 symbol = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (label));
3995 desc->sym = symbol;
3996 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LOCAL;
3997 CONSTANT_POOL_ADDRESS_P (symbol) = 1;
3998 SET_SYMBOL_REF_CONSTANT (symbol, desc);
3999
4000 /* Construct the MEM. */
4001 desc->mem = def = gen_const_mem (mode, symbol);
4002 set_mem_align (def, align);
4003
4004 /* If we're dropping a label to the constant pool, make sure we
4005 don't delete it. */
4006 if (GET_CODE (x) == LABEL_REF)
4007 LABEL_PRESERVE_P (XEXP (x, 0)) = 1;
4008
4009 return copy_rtx (def);
4010 }
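/* Usage sketch (hypothetical backend code, not part of this file): a
   target that cannot load a given floating-point immediate directly can
   spill it to the pool during expansion, e.g.

       rtx mem = force_const_mem (DFmode, x);
       if (mem)
         emit_move_insn (target, mem);

   MEM is the (mem/u/c (symbol_ref "*.LCn")) constructed above, or
   NULL_RTX when the target refuses to pool this constant.  */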
4011 \f
4012 /* Given a constant pool SYMBOL_REF, return the corresponding constant. */
4013
4014 rtx
4015 get_pool_constant (const_rtx addr)
4016 {
4017 return SYMBOL_REF_CONSTANT (addr)->constant;
4018 }
4019
4020 /* Given a constant pool SYMBOL_REF, return the corresponding constant
4021 and whether it has been output or not. */
4022
4023 rtx
4024 get_pool_constant_mark (rtx addr, bool *pmarked)
4025 {
4026 class constant_descriptor_rtx *desc;
4027
4028 desc = SYMBOL_REF_CONSTANT (addr);
4029 *pmarked = (desc->mark != 0);
4030 return desc->constant;
4031 }
4032
4033 /* Similar, return the mode. */
4034
4035 fixed_size_mode
4036 get_pool_mode (const_rtx addr)
4037 {
4038 return SYMBOL_REF_CONSTANT (addr)->mode;
4039 }
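/* Example use of the accessors above, assuming ADDR is a SYMBOL_REF found
   in an insn (hypothetical code, not from this file):

       if (CONSTANT_POOL_ADDRESS_P (addr))
         {
           rtx c = get_pool_constant (addr);
           fixed_size_mode m = get_pool_mode (addr);
           ... C is the pooled constant rtx, M the mode it was pooled in.
         }
*/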
4040
4041 /* Return TRUE if and only if the constant pool has no entries. Note
4042 that even entries we might end up choosing not to emit are counted
4043 here, so there is the potential for missed optimizations. */
4044
4045 bool
4046 constant_pool_empty_p (void)
4047 {
4048 return crtl->varasm.pool->first == NULL;
4049 }
4050 \f
4051 /* Worker function for output_constant_pool_1. Emit assembly for X
4052 in MODE with known alignment ALIGN. */
4053
4054 static void
4055 output_constant_pool_2 (fixed_size_mode mode, rtx x, unsigned int align)
4056 {
4057 switch (GET_MODE_CLASS (mode))
4058 {
4059 case MODE_FLOAT:
4060 case MODE_DECIMAL_FLOAT:
4061 {
4062 gcc_assert (CONST_DOUBLE_AS_FLOAT_P (x));
4063 assemble_real (*CONST_DOUBLE_REAL_VALUE (x),
4064 as_a <scalar_float_mode> (mode), align, false);
4065 break;
4066 }
4067
4068 case MODE_INT:
4069 case MODE_PARTIAL_INT:
4070 case MODE_FRACT:
4071 case MODE_UFRACT:
4072 case MODE_ACCUM:
4073 case MODE_UACCUM:
4074 assemble_integer (x, GET_MODE_SIZE (mode), align, 1);
4075 break;
4076
4077 case MODE_VECTOR_BOOL:
4078 {
4079 gcc_assert (GET_CODE (x) == CONST_VECTOR);
4080
4081 /* Pick the smallest integer mode that contains at least one
4082 whole element. Often this is byte_mode and contains more
4083 than one element. */
4084 unsigned int nelts = GET_MODE_NUNITS (mode);
4085 unsigned int elt_bits = GET_MODE_BITSIZE (mode) / nelts;
4086 unsigned int int_bits = MAX (elt_bits, BITS_PER_UNIT);
4087 scalar_int_mode int_mode = int_mode_for_size (int_bits, 0).require ();
4088
4089 /* Build the constant up one integer at a time. */
4090 unsigned int elts_per_int = int_bits / elt_bits;
4091 for (unsigned int i = 0; i < nelts; i += elts_per_int)
4092 {
4093 unsigned HOST_WIDE_INT value = 0;
4094 unsigned int limit = MIN (nelts - i, elts_per_int);
4095 for (unsigned int j = 0; j < limit; ++j)
4096 if (INTVAL (CONST_VECTOR_ELT (x, i + j)) != 0)
4097 value |= 1 << (j * elt_bits);
4098 output_constant_pool_2 (int_mode, gen_int_mode (value, int_mode),
4099 i != 0 ? MIN (align, int_bits) : align);
4100 }
4101 break;
4102 }
4103 case MODE_VECTOR_FLOAT:
4104 case MODE_VECTOR_INT:
4105 case MODE_VECTOR_FRACT:
4106 case MODE_VECTOR_UFRACT:
4107 case MODE_VECTOR_ACCUM:
4108 case MODE_VECTOR_UACCUM:
4109 {
4110 int i, units;
4111 scalar_mode submode = GET_MODE_INNER (mode);
4112 unsigned int subalign = MIN (align, GET_MODE_BITSIZE (submode));
4113
4114 gcc_assert (GET_CODE (x) == CONST_VECTOR);
4115 units = GET_MODE_NUNITS (mode);
4116
4117 for (i = 0; i < units; i++)
4118 {
4119 rtx elt = CONST_VECTOR_ELT (x, i);
4120 output_constant_pool_2 (submode, elt, i ? subalign : align);
4121 }
4122 }
4123 break;
4124
4125 default:
4126 gcc_unreachable ();
4127 }
4128 }
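/* Worked example for the MODE_VECTOR_BOOL case above (values
   illustrative): for an 8-element boolean vector with 1-bit elements,
   elt_bits is 1 and int_bits is BITS_PER_UNIT, so all eight elements are
   packed into one QImode integer.  A constant {1,0,1,1,0,0,0,0} sets
   bits 0, 2 and 3 and is emitted as the byte value 13 (0b00001101) via
   assemble_integer.  */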
4129
4130 /* Worker function for output_constant_pool. Emit constant DESC,
4131 giving it ALIGN bits of alignment. */
4132
4133 static void
4134 output_constant_pool_1 (class constant_descriptor_rtx *desc,
4135 unsigned int align)
4136 {
4137 rtx x, tmp;
4138
4139 x = desc->constant;
4140
4141 /* See if X is a LABEL_REF (or a CONST referring to a LABEL_REF)
4142 whose CODE_LABEL has been deleted. This can occur if a jump table
4143 is eliminated by optimization. If so, write a constant of zero
4144 instead. Note that this can also happen by turning the
4145 CODE_LABEL into a NOTE. */
4146 /* ??? This seems completely and utterly wrong. Certainly it's
4147 not true for NOTE_INSN_DELETED_LABEL, but I disbelieve proper
4148 functioning even with rtx_insn::deleted and friends. */
4149
4150 tmp = x;
4151 switch (GET_CODE (tmp))
4152 {
4153 case CONST:
4154 if (GET_CODE (XEXP (tmp, 0)) != PLUS
4155 || GET_CODE (XEXP (XEXP (tmp, 0), 0)) != LABEL_REF)
4156 break;
4157 tmp = XEXP (XEXP (tmp, 0), 0);
4158 /* FALLTHRU */
4159
4160 case LABEL_REF:
4161 {
4162 rtx_insn *insn = label_ref_label (tmp);
4163 gcc_assert (!insn->deleted ());
4164 gcc_assert (!NOTE_P (insn)
4165 || NOTE_KIND (insn) != NOTE_INSN_DELETED);
4166 break;
4167 }
4168
4169 default:
4170 break;
4171 }
4172
4173 #ifdef ASM_OUTPUT_SPECIAL_POOL_ENTRY
4174 ASM_OUTPUT_SPECIAL_POOL_ENTRY (asm_out_file, x, desc->mode,
4175 align, desc->labelno, done);
4176 #endif
4177
4178 assemble_align (align);
4179
4180 /* Output the label. */
4181 targetm.asm_out.internal_label (asm_out_file, "LC", desc->labelno);
4182
4183 /* Output the data.
4184 Pass actual alignment value while emitting string constant to asm code
4185 as function 'output_constant_pool_1' explicitly passes the alignment as 1
4186 assuming that the data is already aligned which prevents the generation
4187 of fix-up table entries. */
4188 output_constant_pool_2 (desc->mode, x, desc->align);
4189
4190 /* Make sure all constants in SECTION_MERGE and not SECTION_STRINGS
4191 sections have proper size. */
4192 if (align > GET_MODE_BITSIZE (desc->mode)
4193 && in_section
4194 && (in_section->common.flags & SECTION_MERGE))
4195 assemble_align (align);
4196
4197 #ifdef ASM_OUTPUT_SPECIAL_POOL_ENTRY
4198 done:
4199 #endif
4200 return;
4201 }
4202
4203 /* Recompute the offsets of entries in POOL, and the overall size of
4204 POOL. Do this after calling mark_constant_pool to ensure that we
4205 are computing the offset values for the pool which we will actually
4206 emit. */
4207
4208 static void
4209 recompute_pool_offsets (struct rtx_constant_pool *pool)
4210 {
4211 class constant_descriptor_rtx *desc;
4212 pool->offset = 0;
4213
4214 for (desc = pool->first; desc ; desc = desc->next)
4215 if (desc->mark)
4216 {
4217 /* Recalculate offset. */
4218 unsigned int align = desc->align;
4219 pool->offset += (align / BITS_PER_UNIT) - 1;
4220 pool->offset &= ~ ((align / BITS_PER_UNIT) - 1);
4221 desc->offset = pool->offset;
4222 pool->offset += GET_MODE_SIZE (desc->mode);
4223 }
4224 }
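/* Arithmetic example for the offset rounding used here and in
   force_const_mem (numbers illustrative): with pool->offset == 10 and
   align == 64 bits, the byte alignment is 8, so

       offset = (10 + 8 - 1) & ~(8 - 1) = 17 & ~7 = 16

   places the entry at the next 8-byte boundary, after which the offset
   advances by GET_MODE_SIZE of the entry's mode.  */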
4225
4226 /* Mark all constants that are referenced by SYMBOL_REFs in the pattern
4227 of INSN. Emit any referenced deferred strings. */
4228
4229 static void
4230 mark_constants_in_pattern (rtx insn)
4231 {
4232 subrtx_iterator::array_type array;
4233 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
4234 {
4235 const_rtx x = *iter;
4236 if (GET_CODE (x) == SYMBOL_REF)
4237 {
4238 if (CONSTANT_POOL_ADDRESS_P (x))
4239 {
4240 class constant_descriptor_rtx *desc = SYMBOL_REF_CONSTANT (x);
4241 if (desc->mark == 0)
4242 {
4243 desc->mark = 1;
4244 iter.substitute (desc->constant);
4245 }
4246 }
4247 else if (TREE_CONSTANT_POOL_ADDRESS_P (x))
4248 {
4249 tree decl = SYMBOL_REF_DECL (x);
4250 if (!TREE_ASM_WRITTEN (DECL_INITIAL (decl)))
4251 {
4252 n_deferred_constants--;
4253 output_constant_def_contents (CONST_CAST_RTX (x));
4254 }
4255 }
4256 }
4257 }
4258 }
4259
4260 /* Look through appropriate parts of INSN, marking all entries in the
4261 constant pool which are actually being used. Entries that are only
4262 referenced by other constants are also marked as used. Emit
4263 deferred strings that are used. */
4264
4265 static void
4266 mark_constants (rtx_insn *insn)
4267 {
4268 if (!INSN_P (insn))
4269 return;
4270
4271 /* Insns may appear inside a SEQUENCE. Only check the patterns of
4272 insns, not any notes that may be attached. We don't want to mark
4273 a constant just because it happens to appear in a REG_EQUIV note. */
4274 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
4275 {
4276 int i, n = seq->len ();
4277 for (i = 0; i < n; ++i)
4278 {
4279 rtx subinsn = seq->element (i);
4280 if (INSN_P (subinsn))
4281 mark_constants_in_pattern (subinsn);
4282 }
4283 }
4284 else
4285 mark_constants_in_pattern (insn);
4286 }
4287
4288 /* Look through the instructions for this function, and mark all the
4289 entries in POOL which are actually being used. Emit deferred constants
4290 which have indeed been used. */
4291
4292 static void
4293 mark_constant_pool (void)
4294 {
4295 rtx_insn *insn;
4296
4297 if (!crtl->uses_const_pool && n_deferred_constants == 0)
4298 return;
4299
4300 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4301 mark_constants (insn);
4302 }
4303
4304 /* Write all the constants in POOL. */
4305
4306 static void
4307 output_constant_pool_contents (struct rtx_constant_pool *pool)
4308 {
4309 class constant_descriptor_rtx *desc;
4310
4311 for (desc = pool->first; desc ; desc = desc->next)
4312 if (desc->mark < 0)
4313 {
4314 #ifdef ASM_OUTPUT_DEF
4315 const char *name = XSTR (desc->sym, 0);
4316 char label[256];
4317 char buffer[256 + 32];
4318 const char *p;
4319
4320 ASM_GENERATE_INTERNAL_LABEL (label, "LC", ~desc->mark);
4321 p = label;
4322 if (desc->offset)
4323 {
4324 sprintf (buffer, "%s+%ld", p, (long) (desc->offset));
4325 p = buffer;
4326 }
4327 ASM_OUTPUT_DEF (asm_out_file, name, p);
4328 #else
4329 gcc_unreachable ();
4330 #endif
4331 }
4332 else if (desc->mark)
4333 {
4334 /* If the constant is part of an object_block, make sure that
4335 the constant has been positioned within its block, but do not
4336 write out its definition yet. output_object_blocks will do
4337 that later. */
4338 if (SYMBOL_REF_HAS_BLOCK_INFO_P (desc->sym)
4339 && SYMBOL_REF_BLOCK (desc->sym))
4340 place_block_symbol (desc->sym);
4341 else
4342 {
4343 switch_to_section (targetm.asm_out.select_rtx_section
4344 (desc->mode, desc->constant, desc->align));
4345 output_constant_pool_1 (desc, desc->align);
4346 }
4347 }
4348 }
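/* Example for the desc->mark < 0 path above (a sketch; the directive is
   target-defined): when optimize_constant_pool has found that an entry's
   bytes already live inside another entry, ASM_OUTPUT_DEF emits an alias
   instead of data, on typical ELF targets something like

       .set .LC7, .LC2+8

   meaning that .LC7 refers to the data 8 bytes into .LC2.  */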
4349
4350 struct constant_descriptor_rtx_data {
4351 constant_descriptor_rtx *desc;
4352 target_unit *bytes;
4353 unsigned short size;
4354 unsigned short offset;
4355 unsigned int hash;
4356 };
4357
4358 /* qsort callback to sort constant_descriptor_rtx_data * vector by
4359 decreasing size. */
4360
4361 static int
4362 constant_descriptor_rtx_data_cmp (const void *p1, const void *p2)
4363 {
4364 constant_descriptor_rtx_data *const data1
4365 = *(constant_descriptor_rtx_data * const *) p1;
4366 constant_descriptor_rtx_data *const data2
4367 = *(constant_descriptor_rtx_data * const *) p2;
4368 if (data1->size > data2->size)
4369 return -1;
4370 if (data1->size < data2->size)
4371 return 1;
4372 if (data1->hash < data2->hash)
4373 return -1;
4374 gcc_assert (data1->hash > data2->hash);
4375 return 1;
4376 }
4377
4378 struct const_rtx_data_hasher : nofree_ptr_hash<constant_descriptor_rtx_data>
4379 {
4380 static hashval_t hash (constant_descriptor_rtx_data *);
4381 static bool equal (constant_descriptor_rtx_data *,
4382 constant_descriptor_rtx_data *);
4383 };
4384
4385 /* Hash and compare functions for const_rtx_data_htab. */
4386
4387 hashval_t
4388 const_rtx_data_hasher::hash (constant_descriptor_rtx_data *data)
4389 {
4390 return data->hash;
4391 }
4392
4393 bool
4394 const_rtx_data_hasher::equal (constant_descriptor_rtx_data *x,
4395 constant_descriptor_rtx_data *y)
4396 {
4397 if (x->hash != y->hash || x->size != y->size)
4398 return 0;
4399 unsigned int align1 = x->desc->align;
4400 unsigned int align2 = y->desc->align;
4401 unsigned int offset1 = (x->offset * BITS_PER_UNIT) & (align1 - 1);
4402 unsigned int offset2 = (y->offset * BITS_PER_UNIT) & (align2 - 1);
4403 if (offset1)
4404 align1 = least_bit_hwi (offset1);
4405 if (offset2)
4406 align2 = least_bit_hwi (offset2);
4407 if (align2 > align1)
4408 return 0;
4409 if (memcmp (x->bytes, y->bytes, x->size * sizeof (target_unit)) != 0)
4410 return 0;
4411 return 1;
4412 }
4413
4414 /* Attempt to optimize constant pool POOL. If it contains both CONST_VECTOR
4415 constants and scalar constants with the values of CONST_VECTOR elements,
4416 try to alias the scalar constants with the CONST_VECTOR elements. */
4417
4418 static void
4419 optimize_constant_pool (struct rtx_constant_pool *pool)
4420 {
4421 auto_vec<target_unit, 128> buffer;
4422 auto_vec<constant_descriptor_rtx_data *, 128> vec;
4423 object_allocator<constant_descriptor_rtx_data>
4424 data_pool ("constant_descriptor_rtx_data_pool");
4425 int idx = 0;
4426 size_t size = 0;
4427 for (constant_descriptor_rtx *desc = pool->first; desc; desc = desc->next)
4428 if (desc->mark > 0
4429 && ! (SYMBOL_REF_HAS_BLOCK_INFO_P (desc->sym)
4430 && SYMBOL_REF_BLOCK (desc->sym)))
4431 {
4432 buffer.truncate (0);
4433 buffer.reserve (GET_MODE_SIZE (desc->mode));
4434 if (native_encode_rtx (desc->mode, desc->constant, buffer, 0,
4435 GET_MODE_SIZE (desc->mode)))
4436 {
4437 constant_descriptor_rtx_data *data = data_pool.allocate ();
4438 data->desc = desc;
4439 data->bytes = NULL;
4440 data->size = GET_MODE_SIZE (desc->mode);
4441 data->offset = 0;
4442 data->hash = idx++;
4443 size += data->size;
4444 vec.safe_push (data);
4445 }
4446 }
4447 if (idx)
4448 {
4449 vec.qsort (constant_descriptor_rtx_data_cmp);
4450 unsigned min_size = vec.last ()->size;
4451 target_unit *bytes = XNEWVEC (target_unit, size);
4452 unsigned int i;
4453 constant_descriptor_rtx_data *data;
4454 hash_table<const_rtx_data_hasher> * htab
4455 = new hash_table<const_rtx_data_hasher> (31);
4456 size = 0;
4457 FOR_EACH_VEC_ELT (vec, i, data)
4458 {
4459 buffer.truncate (0);
4460 native_encode_rtx (data->desc->mode, data->desc->constant,
4461 buffer, 0, data->size);
4462 memcpy (bytes + size, buffer.address (), data->size);
4463 data->bytes = bytes + size;
4464 data->hash = iterative_hash (data->bytes,
4465 data->size * sizeof (target_unit), 0);
4466 size += data->size;
4467 constant_descriptor_rtx_data **slot
4468 = htab->find_slot_with_hash (data, data->hash, INSERT);
4469 if (*slot)
4470 {
4471 data->desc->mark = ~(*slot)->desc->labelno;
4472 data->desc->offset = (*slot)->offset;
4473 }
4474 else
4475 {
4476 unsigned int sz = 1 << floor_log2 (data->size);
4477
4478 *slot = data;
4479 for (sz >>= 1; sz >= min_size; sz >>= 1)
4480 for (unsigned off = 0; off + sz <= data->size; off += sz)
4481 {
4482 constant_descriptor_rtx_data tmp;
4483 tmp.desc = data->desc;
4484 tmp.bytes = data->bytes + off;
4485 tmp.size = sz;
4486 tmp.offset = off;
4487 tmp.hash = iterative_hash (tmp.bytes,
4488 sz * sizeof (target_unit), 0);
4489 slot = htab->find_slot_with_hash (&tmp, tmp.hash, INSERT);
4490 if (*slot == NULL)
4491 {
4492 *slot = data_pool.allocate ();
4493 **slot = tmp;
4494 }
4495 }
4496 }
4497 }
4498 delete htab;
4499 XDELETE (bytes);
4500 }
4501 data_pool.release ();
4502 }
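/* Illustration of the aliasing above (hypothetical little-endian
   example): if the pool holds the V4SImode vector {1, 2, 3, 4} and also
   the DImode scalar 0x0000000200000001, the scalar's byte image equals
   the first 8 bytes of the vector, so the scalar descriptor gets
   mark = ~labelno of the vector entry and offset 0, and is later emitted
   as an alias into the vector's data instead of as separate bytes.  */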
4503
4504 /* Mark all constants that are used in the current function, then write
4505 out the function's private constant pool. */
4506
4507 static void
4508 output_constant_pool (const char *fnname ATTRIBUTE_UNUSED,
4509 tree fndecl ATTRIBUTE_UNUSED)
4510 {
4511 struct rtx_constant_pool *pool = crtl->varasm.pool;
4512
4513 /* It is possible for gcc to call force_const_mem and then to later
4514 discard the instructions which refer to the constant. In such a
4515 case we do not need to output the constant. */
4516 mark_constant_pool ();
4517
4518 /* Having marked the constant pool entries we'll actually emit, we
4519 now need to rebuild the offset information, which may have become
4520 stale. */
4521 recompute_pool_offsets (pool);
4522
4523 #ifdef ASM_OUTPUT_POOL_PROLOGUE
4524 ASM_OUTPUT_POOL_PROLOGUE (asm_out_file, fnname, fndecl, pool->offset);
4525 #endif
4526
4527 output_constant_pool_contents (pool);
4528
4529 #ifdef ASM_OUTPUT_POOL_EPILOGUE
4530 ASM_OUTPUT_POOL_EPILOGUE (asm_out_file, fnname, fndecl, pool->offset);
4531 #endif
4532 }
4533 \f
4534 /* Write the contents of the shared constant pool. */
4535
4536 void
4537 output_shared_constant_pool (void)
4538 {
4539 if (optimize
4540 && TARGET_SUPPORTS_ALIASES)
4541 optimize_constant_pool (shared_constant_pool);
4542
4543 output_constant_pool_contents (shared_constant_pool);
4544 }
4545 \f
4546 /* Determine what kind of relocations EXP may need. */
4547
4548 int
4549 compute_reloc_for_constant (tree exp)
4550 {
4551 int reloc = 0, reloc2;
4552 tree tem;
4553
4554 switch (TREE_CODE (exp))
4555 {
4556 case ADDR_EXPR:
4557 case FDESC_EXPR:
4558 /* Go inside any operations that get_inner_reference can handle and see
4559 if what's inside is a constant: no need to do anything here for
4560 addresses of variables or functions. */
4561 for (tem = TREE_OPERAND (exp, 0); handled_component_p (tem);
4562 tem = TREE_OPERAND (tem, 0))
4563 ;
4564
4565 if (TREE_CODE (tem) == MEM_REF
4566 && TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR)
4567 {
4568 reloc = compute_reloc_for_constant (TREE_OPERAND (tem, 0));
4569 break;
4570 }
4571
4572 if (!targetm.binds_local_p (tem))
4573 reloc |= 2;
4574 else
4575 reloc |= 1;
4576 break;
4577
4578 case PLUS_EXPR:
4579 case POINTER_PLUS_EXPR:
4580 reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
4581 reloc |= compute_reloc_for_constant (TREE_OPERAND (exp, 1));
4582 break;
4583
4584 case MINUS_EXPR:
4585 reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
4586 reloc2 = compute_reloc_for_constant (TREE_OPERAND (exp, 1));
4587 /* The difference of two local labels is computable at link time. */
4588 if (reloc == 1 && reloc2 == 1)
4589 reloc = 0;
4590 else
4591 reloc |= reloc2;
4592 break;
4593
4594 CASE_CONVERT:
4595 case VIEW_CONVERT_EXPR:
4596 reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
4597 break;
4598
4599 case CONSTRUCTOR:
4600 {
4601 unsigned HOST_WIDE_INT idx;
4602 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, tem)
4603 if (tem != 0)
4604 reloc |= compute_reloc_for_constant (tem);
4605 }
4606 break;
4607
4608 default:
4609 break;
4610 }
4611 return reloc;
4612 }
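/* Examples of the bitmask returned above (bit 0: needs a relocation
   against a locally-binding symbol, bit 1: against a symbol that may be
   overridden; names are hypothetical and the split depends on
   targetm.binds_local_p):

       &file_static_a                    -> 1
       &overridable_global               -> 2
       &file_static_a - &file_static_b   -> 0  (link-time computable)
*/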
4613
4614 /* Find all the constants whose addresses are referenced inside of EXP,
4615 and make sure assembler code with a label has been output for each one.
4616 DEFER is passed through to output_constant_def for each constant found. */
4617
4618 static void
4619 output_addressed_constants (tree exp, int defer)
4620 {
4621 tree tem;
4622
4623 switch (TREE_CODE (exp))
4624 {
4625 case ADDR_EXPR:
4626 case FDESC_EXPR:
4627 /* Go inside any operations that get_inner_reference can handle and see
4628 if what's inside is a constant: no need to do anything here for
4629 addresses of variables or functions. */
4630 for (tem = TREE_OPERAND (exp, 0); handled_component_p (tem);
4631 tem = TREE_OPERAND (tem, 0))
4632 ;
4633
4634 /* If we have an initialized CONST_DECL, retrieve the initializer. */
4635 if (TREE_CODE (tem) == CONST_DECL && DECL_INITIAL (tem))
4636 tem = DECL_INITIAL (tem);
4637
4638 if (CONSTANT_CLASS_P (tem) || TREE_CODE (tem) == CONSTRUCTOR)
4639 output_constant_def (tem, defer);
4640
4641 if (TREE_CODE (tem) == MEM_REF)
4642 output_addressed_constants (TREE_OPERAND (tem, 0), defer);
4643 break;
4644
4645 case PLUS_EXPR:
4646 case POINTER_PLUS_EXPR:
4647 case MINUS_EXPR:
4648 output_addressed_constants (TREE_OPERAND (exp, 1), defer);
4649 gcc_fallthrough ();
4650
4651 CASE_CONVERT:
4652 case VIEW_CONVERT_EXPR:
4653 output_addressed_constants (TREE_OPERAND (exp, 0), defer);
4654 break;
4655
4656 case CONSTRUCTOR:
4657 {
4658 unsigned HOST_WIDE_INT idx;
4659 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, tem)
4660 if (tem != 0)
4661 output_addressed_constants (tem, defer);
4662 }
4663 break;
4664
4665 default:
4666 break;
4667 }
4668 }
4669 \f
4670 /* Whether a constructor CTOR is a valid static constant initializer if all
4671 its elements are. This used to be internal to initializer_constant_valid_p
4672 and has been exposed to let other functions like categorize_ctor_elements
4673 evaluate the property while walking a constructor for other purposes. */
4674
4675 bool
4676 constructor_static_from_elts_p (const_tree ctor)
4677 {
4678 return (TREE_CONSTANT (ctor)
4679 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4680 || TREE_CODE (TREE_TYPE (ctor)) == RECORD_TYPE
4681 || TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE));
4682 }
4683
4684 static tree initializer_constant_valid_p_1 (tree value, tree endtype,
4685 tree *cache);
4686
4687 /* A subroutine of initializer_constant_valid_p. VALUE is a MINUS_EXPR,
4688 PLUS_EXPR or POINTER_PLUS_EXPR. This looks for cases of VALUE
4689 which are valid when ENDTYPE is an integer of any size; in
4690 particular, this does not accept a pointer minus a constant. This
4691 returns null_pointer_node if the VALUE is an absolute constant
4692 which can be used to initialize a static variable. Otherwise it
4693 returns NULL. */
4694
4695 static tree
4696 narrowing_initializer_constant_valid_p (tree value, tree endtype, tree *cache)
4697 {
4698 tree op0, op1;
4699
4700 if (!INTEGRAL_TYPE_P (endtype))
4701 return NULL_TREE;
4702
4703 op0 = TREE_OPERAND (value, 0);
4704 op1 = TREE_OPERAND (value, 1);
4705
4706 /* Like STRIP_NOPS except allow the operand mode to widen. This
4707 works around a feature of fold that simplifies (int)(p1 - p2) to
4708 ((int)p1 - (int)p2) under the theory that the narrower operation
4709 is cheaper. */
4710
4711 while (CONVERT_EXPR_P (op0)
4712 || TREE_CODE (op0) == NON_LVALUE_EXPR)
4713 {
4714 tree inner = TREE_OPERAND (op0, 0);
4715 if (inner == error_mark_node
4716 || ! INTEGRAL_MODE_P (TYPE_MODE (TREE_TYPE (inner)))
4717 || (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (op0)))
4718 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (inner)))))
4719 break;
4720 op0 = inner;
4721 }
4722
4723 while (CONVERT_EXPR_P (op1)
4724 || TREE_CODE (op1) == NON_LVALUE_EXPR)
4725 {
4726 tree inner = TREE_OPERAND (op1, 0);
4727 if (inner == error_mark_node
4728 || ! INTEGRAL_MODE_P (TYPE_MODE (TREE_TYPE (inner)))
4729 || (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (op1)))
4730 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (inner)))))
4731 break;
4732 op1 = inner;
4733 }
4734
4735 op0 = initializer_constant_valid_p_1 (op0, endtype, cache);
4736 if (!op0)
4737 return NULL_TREE;
4738
4739 op1 = initializer_constant_valid_p_1 (op1, endtype,
4740 cache ? cache + 2 : NULL);
4741 /* Both initializers must be known. */
4742 if (op1)
4743 {
4744 if (op0 == op1
4745 && (op0 == null_pointer_node
4746 || TREE_CODE (value) == MINUS_EXPR))
4747 return null_pointer_node;
4748
4749 /* Support differences between labels. */
4750 if (TREE_CODE (op0) == LABEL_DECL
4751 && TREE_CODE (op1) == LABEL_DECL)
4752 return null_pointer_node;
4753
4754 if (TREE_CODE (op0) == STRING_CST
4755 && TREE_CODE (op1) == STRING_CST
4756 && operand_equal_p (op0, op1, 1))
4757 return null_pointer_node;
4758 }
4759
4760 return NULL_TREE;
4761 }
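/* Source-level illustration (hypothetical C, not from GCC itself):

       static int delta = (int) (&arr[8] - &arr[2]);

   Fold turns the narrowing into ((int) &arr[8] - (int) &arr[2]); the
   loops above strip those narrowing conversions, both operands resolve
   to the same relocation (ARR), and null_pointer_node (absolute) is
   returned.  A single narrowed address has no matching partner and
   yields NULL_TREE instead.  */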
4762
4763 /* Helper function of initializer_constant_valid_p.
4764 Return nonzero if VALUE is a valid constant-valued expression
4765 for use in initializing a static variable; one that can be an
4766 element of a "constant" initializer.
4767
4768 Return null_pointer_node if the value is absolute;
4769 if it is relocatable, return the variable that determines the relocation.
4770 We assume that VALUE has been folded as much as possible;
4771 therefore, we do not need to check for such things as
4772 arithmetic-combinations of integers.
4773
4774 Use CACHE (pointer to 2 tree values) for caching if non-NULL. */
4775
4776 static tree
4777 initializer_constant_valid_p_1 (tree value, tree endtype, tree *cache)
4778 {
4779 tree ret;
4780
4781 switch (TREE_CODE (value))
4782 {
4783 case CONSTRUCTOR:
4784 if (constructor_static_from_elts_p (value))
4785 {
4786 unsigned HOST_WIDE_INT idx;
4787 tree elt;
4788 bool absolute = true;
4789
4790 if (cache && cache[0] == value)
4791 return cache[1];
4792 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (value), idx, elt)
4793 {
4794 tree reloc;
4795 reloc = initializer_constant_valid_p_1 (elt, TREE_TYPE (elt),
4796 NULL);
4797 if (!reloc
4798 /* An absolute value is required with reverse SSO. */
4799 || (reloc != null_pointer_node
4800 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (value))
4801 && !AGGREGATE_TYPE_P (TREE_TYPE (elt))))
4802 {
4803 if (cache)
4804 {
4805 cache[0] = value;
4806 cache[1] = NULL_TREE;
4807 }
4808 return NULL_TREE;
4809 }
4810 if (reloc != null_pointer_node)
4811 absolute = false;
4812 }
4813 /* For a non-absolute relocation, there is no single
4814 variable that can be "the variable that determines the
4815 relocation." */
4816 if (cache)
4817 {
4818 cache[0] = value;
4819 cache[1] = absolute ? null_pointer_node : error_mark_node;
4820 }
4821 return absolute ? null_pointer_node : error_mark_node;
4822 }
4823
4824 return TREE_STATIC (value) ? null_pointer_node : NULL_TREE;
4825
4826 case INTEGER_CST:
4827 case VECTOR_CST:
4828 case REAL_CST:
4829 case FIXED_CST:
4830 case STRING_CST:
4831 case COMPLEX_CST:
4832 return null_pointer_node;
4833
4834 case ADDR_EXPR:
4835 case FDESC_EXPR:
4836 {
4837 tree op0 = staticp (TREE_OPERAND (value, 0));
4838 if (op0)
4839 {
4840 /* "&(*a).f" is like unto pointer arithmetic. If "a" turns out
4841 to be a constant, this is old-skool offsetof-like nonsense. */
4842 if (TREE_CODE (op0) == INDIRECT_REF
4843 && TREE_CONSTANT (TREE_OPERAND (op0, 0)))
4844 return null_pointer_node;
4845 /* Taking the address of a nested function involves a trampoline,
4846 unless we don't need or want one. */
4847 if (TREE_CODE (op0) == FUNCTION_DECL
4848 && DECL_STATIC_CHAIN (op0)
4849 && !TREE_NO_TRAMPOLINE (value))
4850 return NULL_TREE;
4851 /* "&{...}" requires a temporary to hold the constructed
4852 object. */
4853 if (TREE_CODE (op0) == CONSTRUCTOR)
4854 return NULL_TREE;
4855 }
4856 return op0;
4857 }
4858
4859 case NON_LVALUE_EXPR:
4860 return initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
4861 endtype, cache);
4862
4863 case VIEW_CONVERT_EXPR:
4864 {
4865 tree src = TREE_OPERAND (value, 0);
4866 tree src_type = TREE_TYPE (src);
4867 tree dest_type = TREE_TYPE (value);
4868
4869 /* Allow view-conversions from aggregate to non-aggregate type only
4870 if the bit pattern is fully preserved afterwards; otherwise, the
4871 RTL expander won't be able to apply a subsequent transformation
4872 to the underlying constructor. */
4873 if (AGGREGATE_TYPE_P (src_type) && !AGGREGATE_TYPE_P (dest_type))
4874 {
4875 if (TYPE_MODE (endtype) == TYPE_MODE (dest_type))
4876 return initializer_constant_valid_p_1 (src, endtype, cache);
4877 else
4878 return NULL_TREE;
4879 }
4880
4881 /* Allow all other kinds of view-conversion. */
4882 return initializer_constant_valid_p_1 (src, endtype, cache);
4883 }
4884
4885 CASE_CONVERT:
4886 {
4887 tree src = TREE_OPERAND (value, 0);
4888 tree src_type = TREE_TYPE (src);
4889 tree dest_type = TREE_TYPE (value);
4890
4891 /* Allow conversions between pointer types, floating-point
4892 types, and offset types. */
4893 if ((POINTER_TYPE_P (dest_type) && POINTER_TYPE_P (src_type))
4894 || (FLOAT_TYPE_P (dest_type) && FLOAT_TYPE_P (src_type))
4895 || (TREE_CODE (dest_type) == OFFSET_TYPE
4896 && TREE_CODE (src_type) == OFFSET_TYPE))
4897 return initializer_constant_valid_p_1 (src, endtype, cache);
4898
4899 /* Allow length-preserving conversions between integer types. */
4900 if (INTEGRAL_TYPE_P (dest_type) && INTEGRAL_TYPE_P (src_type)
4901 && (TYPE_PRECISION (dest_type) == TYPE_PRECISION (src_type)))
4902 return initializer_constant_valid_p_1 (src, endtype, cache);
4903
4904 /* Allow conversions between other integer types only if the
4905 value is an explicit constant. Don't allow sign-extension to a type
4906 larger than word and pointer: there are no relocations that would
4907 allow sign-extending a value to a wider type. */
4908 if (INTEGRAL_TYPE_P (dest_type)
4909 && INTEGRAL_TYPE_P (src_type)
4910 && (TYPE_UNSIGNED (src_type)
4911 || TYPE_PRECISION (dest_type) <= TYPE_PRECISION (src_type)
4912 || TYPE_PRECISION (dest_type) <= BITS_PER_WORD
4913 || TYPE_PRECISION (dest_type) <= POINTER_SIZE))
4914 {
4915 tree inner = initializer_constant_valid_p_1 (src, endtype, cache);
4916 if (inner == null_pointer_node)
4917 return null_pointer_node;
4918 break;
4919 }
4920
4921 /* Allow (int) &foo provided int is as wide as a pointer. */
4922 if (INTEGRAL_TYPE_P (dest_type) && POINTER_TYPE_P (src_type)
4923 && (TYPE_PRECISION (dest_type) >= TYPE_PRECISION (src_type)))
4924 return initializer_constant_valid_p_1 (src, endtype, cache);
4925
4926 /* Likewise conversions from int to pointers, but also allow
4927 conversions from 0. */
4928 if ((POINTER_TYPE_P (dest_type)
4929 || TREE_CODE (dest_type) == OFFSET_TYPE)
4930 && INTEGRAL_TYPE_P (src_type))
4931 {
4932 if (TREE_CODE (src) == INTEGER_CST
4933 && TYPE_PRECISION (dest_type) >= TYPE_PRECISION (src_type))
4934 return null_pointer_node;
4935 if (integer_zerop (src))
4936 return null_pointer_node;
4937 else if (TYPE_PRECISION (dest_type) <= TYPE_PRECISION (src_type))
4938 return initializer_constant_valid_p_1 (src, endtype, cache);
4939 }
4940
4941 /* Allow conversions to struct or union types if the value
4942 inside is okay. */
4943 if (TREE_CODE (dest_type) == RECORD_TYPE
4944 || TREE_CODE (dest_type) == UNION_TYPE)
4945 return initializer_constant_valid_p_1 (src, endtype, cache);
4946 }
4947 break;
4948
4949 case POINTER_PLUS_EXPR:
4950 case PLUS_EXPR:
4951 /* Any valid floating-point constants will have been folded by now;
4952 with -frounding-math we hit this with addition of two constants. */
4953 if (TREE_CODE (endtype) == REAL_TYPE)
4954 return NULL_TREE;
4955 if (cache && cache[0] == value)
4956 return cache[1];
4957 if (! INTEGRAL_TYPE_P (endtype)
4958 || TYPE_PRECISION (endtype) >= TYPE_PRECISION (TREE_TYPE (value)))
4959 {
4960 tree ncache[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
4961 tree valid0
4962 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
4963 endtype, ncache);
4964 tree valid1
4965 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 1),
4966 endtype, ncache + 2);
4967 /* If either term is absolute, use the other term's relocation. */
4968 if (valid0 == null_pointer_node)
4969 ret = valid1;
4970 else if (valid1 == null_pointer_node)
4971 ret = valid0;
4972 /* Support narrowing pointer differences. */
4973 else
4974 ret = narrowing_initializer_constant_valid_p (value, endtype,
4975 ncache);
4976 }
4977 else
4978 /* Support narrowing pointer differences. */
4979 ret = narrowing_initializer_constant_valid_p (value, endtype, NULL);
4980 if (cache)
4981 {
4982 cache[0] = value;
4983 cache[1] = ret;
4984 }
4985 return ret;
4986
4987 case POINTER_DIFF_EXPR:
4988 case MINUS_EXPR:
4989 if (TREE_CODE (endtype) == REAL_TYPE)
4990 return NULL_TREE;
4991 if (cache && cache[0] == value)
4992 return cache[1];
4993 if (! INTEGRAL_TYPE_P (endtype)
4994 || TYPE_PRECISION (endtype) >= TYPE_PRECISION (TREE_TYPE (value)))
4995 {
4996 tree ncache[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
4997 tree valid0
4998 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
4999 endtype, ncache);
5000 tree valid1
5001 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 1),
5002 endtype, ncache + 2);
5003 /* Win if second argument is absolute. */
5004 if (valid1 == null_pointer_node)
5005 ret = valid0;
5006 /* Win if both arguments have the same relocation.
5007 Then the value is absolute. */
5008 else if (valid0 == valid1 && valid0 != 0)
5009 ret = null_pointer_node;
5010 /* Since GCC guarantees that string constants are unique in the
5011 generated code, a subtraction between two copies of the same
5012 constant string is absolute. */
5013 else if (valid0 && TREE_CODE (valid0) == STRING_CST
5014 && valid1 && TREE_CODE (valid1) == STRING_CST
5015 && operand_equal_p (valid0, valid1, 1))
5016 ret = null_pointer_node;
5017 /* Support narrowing differences. */
5018 else
5019 ret = narrowing_initializer_constant_valid_p (value, endtype,
5020 ncache);
5021 }
5022 else
5023 /* Support narrowing differences. */
5024 ret = narrowing_initializer_constant_valid_p (value, endtype, NULL);
5025 if (cache)
5026 {
5027 cache[0] = value;
5028 cache[1] = ret;
5029 }
5030 return ret;
5031
5032 default:
5033 break;
5034 }
5035
5036 return NULL_TREE;
5037 }
5038
5039 /* Return nonzero if VALUE is a valid constant-valued expression
5040 for use in initializing a static variable; one that can be an
5041 element of a "constant" initializer.
5042
5043 Return null_pointer_node if the value is absolute;
5044 if it is relocatable, return the variable that determines the relocation.
5045 We assume that VALUE has been folded as much as possible;
5046 therefore, we do not need to check for such things as
5047 arithmetic-combinations of integers. */
5048 tree
5049 initializer_constant_valid_p (tree value, tree endtype, bool reverse)
5050 {
5051 tree reloc = initializer_constant_valid_p_1 (value, endtype, NULL);
5052
5053 /* An absolute value is required with reverse storage order. */
5054 if (reloc
5055 && reloc != null_pointer_node
5056 && reverse
5057 && !AGGREGATE_TYPE_P (endtype)
5058 && !VECTOR_TYPE_P (endtype))
5059 reloc = NULL_TREE;
5060
5061 return reloc;
5062 }
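/* Source-level examples of the classification (hypothetical C, not from
   GCC itself):

       static int  i = 42;                      absolute: null_pointer_node
       static int *p = &some_global;            relocatable: returns SOME_GLOBAL
       static short s = (short) &some_global;   invalid: NULL_TREE

   With reverse storage order, only absolute values remain acceptable for
   scalar fields, as enforced just above.  */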
5063 \f
5064 /* Return true if VALUE is a valid constant-valued expression
5065 for use in initializing a static bit-field; one that can be
5066 an element of a "constant" initializer. */
5067
5068 bool
5069 initializer_constant_valid_for_bitfield_p (tree value)
5070 {
5071 /* For bitfields we support integer constants or possibly nested aggregates
5072 of such. */
5073 switch (TREE_CODE (value))
5074 {
5075 case CONSTRUCTOR:
5076 {
5077 unsigned HOST_WIDE_INT idx;
5078 tree elt;
5079
5080 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (value), idx, elt)
5081 if (!initializer_constant_valid_for_bitfield_p (elt))
5082 return false;
5083 return true;
5084 }
5085
5086 case INTEGER_CST:
5087 case REAL_CST:
5088 return true;
5089
5090 case VIEW_CONVERT_EXPR:
5091 case NON_LVALUE_EXPR:
5092 return
5093 initializer_constant_valid_for_bitfield_p (TREE_OPERAND (value, 0));
5094
5095 default:
5096 break;
5097 }
5098
5099 return false;
5100 }
5101
5102 /* Check if a STRING_CST fits into the field.
5103 Tolerate only the case when the NUL termination
5104 does not fit into the field. */
5105
5106 static bool
5107 check_string_literal (tree string, unsigned HOST_WIDE_INT size)
5108 {
5109 tree type = TREE_TYPE (string);
5110 tree eltype = TREE_TYPE (type);
5111 unsigned HOST_WIDE_INT elts = tree_to_uhwi (TYPE_SIZE_UNIT (eltype));
5112 unsigned HOST_WIDE_INT mem_size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
5113 int len = TREE_STRING_LENGTH (string);
5114
5115 if (elts != 1 && elts != 2 && elts != 4)
5116 return false;
5117 if (len < 0 || len % elts != 0)
5118 return false;
5119 if (size < (unsigned)len)
5120 return false;
5121 if (mem_size != size)
5122 return false;
5123 return true;
5124 }
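/* Example of the check above (values hypothetical): a char16_t string
   u"a" initializing a 4-byte field has elts == 2, len == 4 and
   mem_size == 4, so with size == 4 all tests pass; an element size other
   than 1, 2 or 4 bytes fails the first test immediately.  */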
5125
5126 /* output_constructor outer state of relevance in recursive calls, typically
5127 for nested aggregate bitfields. */
5128
5129 struct oc_outer_state {
5130 unsigned int bit_offset; /* current position in ... */
5131 int byte; /* ... the outer byte buffer. */
5132 };
5133
5134 static unsigned HOST_WIDE_INT
5135 output_constructor (tree, unsigned HOST_WIDE_INT, unsigned int, bool,
5136 oc_outer_state *);
5137
5138 /* Output assembler code for constant EXP, with no label.
5139 This includes the pseudo-op such as ".int" or ".byte", and a newline.
5140 Assumes output_addressed_constants has been done on EXP already.
5141
5142 Generate at least SIZE bytes of assembler data, padding at the end
5143 with zeros if necessary. SIZE must always be specified. The returned
5144 value is the actual number of bytes of assembler data generated, which
5145 may be bigger than SIZE if the object contains a variable length field.
5146
5147 SIZE is important for structure constructors,
5148 since trailing members may have been omitted from the constructor.
5149 It is also important for initialization of arrays from string constants
5150 since the full length of the string constant might not be wanted.
5151 It is also needed for initialization of unions, where the initializer's
5152 type is just one member, and that may not be as long as the union.
5153
5154 There is a case in which we would fail to output exactly SIZE bytes:
5155 for a structure constructor that wants to produce more than SIZE bytes.
5156 But such constructors will never be generated for any possible input.
5157
5158 ALIGN is the alignment of the data in bits.
5159
5160 If REVERSE is true, EXP is output in reverse storage order. */
5161
5162 static unsigned HOST_WIDE_INT
5163 output_constant (tree exp, unsigned HOST_WIDE_INT size, unsigned int align,
5164 bool reverse, bool merge_strings)
5165 {
5166 enum tree_code code;
5167 unsigned HOST_WIDE_INT thissize;
5168 rtx cst;
5169
5170 if (size == 0 || flag_syntax_only)
5171 return size;
5172
5173 /* See if we're trying to initialize a pointer in a non-default mode
5174 to the address of some declaration somewhere. If the target says
5175 the mode is valid for pointers, assume the target has a way of
5176 resolving it. */
5177 if (TREE_CODE (exp) == NOP_EXPR
5178 && POINTER_TYPE_P (TREE_TYPE (exp))
5179 && targetm.addr_space.valid_pointer_mode
5180 (SCALAR_INT_TYPE_MODE (TREE_TYPE (exp)),
5181 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)))))
5182 {
5183 tree saved_type = TREE_TYPE (exp);
5184
5185 /* Peel off any intermediate conversions-to-pointer for valid
5186 pointer modes. */
5187 while (TREE_CODE (exp) == NOP_EXPR
5188 && POINTER_TYPE_P (TREE_TYPE (exp))
5189 && targetm.addr_space.valid_pointer_mode
5190 (SCALAR_INT_TYPE_MODE (TREE_TYPE (exp)),
5191 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)))))
5192 exp = TREE_OPERAND (exp, 0);
5193
5194 /* If what we're left with is the address of something, we can
5195 convert the address to the final type and output it that
5196 way. */
5197 if (TREE_CODE (exp) == ADDR_EXPR)
5198 exp = build1 (ADDR_EXPR, saved_type, TREE_OPERAND (exp, 0));
5199 /* Likewise for constant ints. */
5200 else if (TREE_CODE (exp) == INTEGER_CST)
5201 exp = fold_convert (saved_type, exp);
5202
5203 }
5204
5205 /* Eliminate any conversions since we'll be outputting the underlying
5206 constant. */
5207 while (CONVERT_EXPR_P (exp)
5208 || TREE_CODE (exp) == NON_LVALUE_EXPR
5209 || TREE_CODE (exp) == VIEW_CONVERT_EXPR)
5210 {
5211 HOST_WIDE_INT type_size = int_size_in_bytes (TREE_TYPE (exp));
5212 HOST_WIDE_INT op_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0)));
5213
5214 /* Make sure eliminating the conversion is really a no-op, except with
5215 VIEW_CONVERT_EXPRs to allow for wild Ada unchecked conversions and
5216 union types to allow for Ada unchecked unions. */
5217 if (type_size > op_size
5218 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5219 && TREE_CODE (TREE_TYPE (exp)) != UNION_TYPE)
5220 /* Keep the conversion. */
5221 break;
5222 else
5223 exp = TREE_OPERAND (exp, 0);
5224 }
5225
5226 code = TREE_CODE (TREE_TYPE (exp));
5227 thissize = int_size_in_bytes (TREE_TYPE (exp));
5228
5229 /* Allow a constructor with no elements for any data type.
5230 This means to fill the space with zeros. */
5231 if (TREE_CODE (exp) == CONSTRUCTOR
5232 && vec_safe_is_empty (CONSTRUCTOR_ELTS (exp)))
5233 {
5234 assemble_zeros (size);
5235 return size;
5236 }
5237
5238 if (TREE_CODE (exp) == FDESC_EXPR)
5239 {
5240 #ifdef ASM_OUTPUT_FDESC
5241 HOST_WIDE_INT part = tree_to_shwi (TREE_OPERAND (exp, 1));
5242 tree decl = TREE_OPERAND (exp, 0);
5243 ASM_OUTPUT_FDESC (asm_out_file, decl, part);
5244 #else
5245 gcc_unreachable ();
5246 #endif
5247 return size;
5248 }
5249
5250 /* Now output the underlying data. If we've handled the padding, return.
5251 Otherwise, break and ensure SIZE is the size written. */
5252 switch (code)
5253 {
5254 case BOOLEAN_TYPE:
5255 case INTEGER_TYPE:
5256 case ENUMERAL_TYPE:
5257 case POINTER_TYPE:
5258 case REFERENCE_TYPE:
5259 case OFFSET_TYPE:
5260 case FIXED_POINT_TYPE:
5261 case NULLPTR_TYPE:
5262 cst = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
5263 if (reverse)
5264 cst = flip_storage_order (TYPE_MODE (TREE_TYPE (exp)), cst);
5265 if (!assemble_integer (cst, MIN (size, thissize), align, 0))
5266 error ("initializer for integer/fixed-point value is too complicated");
5267 break;
5268
5269 case REAL_TYPE:
5270 if (TREE_CODE (exp) != REAL_CST)
5271 error ("initializer for floating value is not a floating constant");
5272 else
5273 assemble_real (TREE_REAL_CST (exp),
5274 SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (exp)),
5275 align, reverse);
5276 break;
5277
5278 case COMPLEX_TYPE:
5279 output_constant (TREE_REALPART (exp), thissize / 2, align,
5280 reverse, false);
5281 output_constant (TREE_IMAGPART (exp), thissize / 2,
5282 min_align (align, BITS_PER_UNIT * (thissize / 2)),
5283 reverse, false);
5284 break;
5285
5286 case ARRAY_TYPE:
5287 case VECTOR_TYPE:
5288 switch (TREE_CODE (exp))
5289 {
5290 case CONSTRUCTOR:
5291 return output_constructor (exp, size, align, reverse, NULL);
5292 case STRING_CST:
5293 thissize = (unsigned HOST_WIDE_INT)TREE_STRING_LENGTH (exp);
5294 if (merge_strings
5295 && (thissize == 0
5296 || TREE_STRING_POINTER (exp) [thissize - 1] != '\0'))
5297 thissize++;
5298 gcc_checking_assert (check_string_literal (exp, size));
5299 assemble_string (TREE_STRING_POINTER (exp), thissize);
5300 break;
5301 case VECTOR_CST:
5302 {
5303 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5304 unsigned int nalign = MIN (align, GET_MODE_ALIGNMENT (inner));
5305 int elt_size = GET_MODE_SIZE (inner);
5306 output_constant (VECTOR_CST_ELT (exp, 0), elt_size, align,
5307 reverse, false);
5308 thissize = elt_size;
5309 /* Static constants must have a fixed size. */
5310 unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
5311 for (unsigned int i = 1; i < nunits; i++)
5312 {
5313 output_constant (VECTOR_CST_ELT (exp, i), elt_size, nalign,
5314 reverse, false);
5315 thissize += elt_size;
5316 }
5317 break;
5318 }
5319 default:
5320 gcc_unreachable ();
5321 }
5322 break;
5323
5324 case RECORD_TYPE:
5325 case UNION_TYPE:
5326 gcc_assert (TREE_CODE (exp) == CONSTRUCTOR);
5327 return output_constructor (exp, size, align, reverse, NULL);
5328
5329 case ERROR_MARK:
5330 return 0;
5331
5332 default:
5333 gcc_unreachable ();
5334 }
5335
5336 if (size > thissize)
5337 assemble_zeros (size - thissize);
5338
5339 return size;
5340 }
5341 \f
5342 /* Subroutine of output_constructor, used for computing the size of
5343 arrays of unspecified length. VAL must be a CONSTRUCTOR of an array
5344 type with an unspecified upper bound. */
5345
5346 static unsigned HOST_WIDE_INT
5347 array_size_for_constructor (tree val)
5348 {
5349 tree max_index;
5350 unsigned HOST_WIDE_INT cnt;
5351 tree index, value, tmp;
5352 offset_int i;
5353
5354 /* This code used to attempt to handle string constants that are not
5355 arrays of single-bytes, but nothing else does, so there's no point in
5356 doing it here. */
5357 if (TREE_CODE (val) == STRING_CST)
5358 return TREE_STRING_LENGTH (val);
5359
5360 max_index = NULL_TREE;
5361 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (val), cnt, index, value)
5362 {
5363 if (TREE_CODE (index) == RANGE_EXPR)
5364 index = TREE_OPERAND (index, 1);
5365 if (max_index == NULL_TREE || tree_int_cst_lt (max_index, index))
5366 max_index = index;
5367 }
5368
5369 if (max_index == NULL_TREE)
5370 return 0;
5371
5372 /* Compute the total number of array elements. */
5373 tmp = TYPE_MIN_VALUE (TYPE_DOMAIN (TREE_TYPE (val)));
5374 i = wi::to_offset (max_index) - wi::to_offset (tmp) + 1;
5375
5376 /* Multiply by the array element unit size to find number of bytes. */
5377 i *= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (val))));
5378
5379 gcc_assert (wi::fits_uhwi_p (i));
5380 return i.to_uhwi ();
5381 }
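
/* Editorial illustration (hypothetical source, not part of varasm.c): for a
   constructor initializing an array of unspecified length, e.g. the GNU
   flexible-array-member initialization

       struct msg { int len; char payload[]; };
       static struct msg m = { 3, { 'a', 'b', 'c' } };

   the CONSTRUCTOR for PAYLOAD has min_index 0 and max_index 2, so the
   computation above yields (2 - 0 + 1) * 1 = 3 bytes for the trailing
   array. */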
5382
5383 /* Other data structures and helpers for output_constructor. */
5384
5385 /* output_constructor local state to support interaction with helpers. */
5386
5387 struct oc_local_state {
5388
5389 /* Received arguments. */
5390 tree exp; /* Constructor expression. */
5391 tree type; /* Type of constructor expression. */
5392 unsigned HOST_WIDE_INT size; /* # bytes to output - pad if necessary. */
5393 unsigned int align; /* Known initial alignment. */
5394 tree min_index; /* Lower bound if specified for an array. */
5395
5396 /* Output processing state. */
5397 HOST_WIDE_INT total_bytes; /* # bytes output so far / current position. */
5398 int byte; /* Part of a bitfield byte yet to be output. */
5399 int last_relative_index; /* Implicit or explicit index of the last
5400 array element output within a bitfield. */
5401 bool byte_buffer_in_use; /* Whether BYTE is in use. */
5402 bool reverse; /* Whether reverse storage order is in use. */
5403
5404 /* Current element. */
5405 tree field; /* Current field decl in a record. */
5406 tree val; /* Current element value. */
5407 tree index; /* Current element index. */
5408
5409 };
5410
5411 /* Helper for output_constructor. From the current LOCAL state, output a
5412 RANGE_EXPR element. */
5413
5414 static void
5415 output_constructor_array_range (oc_local_state *local)
5416 {
5417 /* Perform the index calculation in modulo arithmetic but
5418 sign-extend the result because Ada has negative DECL_FIELD_OFFSETs
5419 but we are using an unsigned sizetype. */
5420 unsigned prec = TYPE_PRECISION (sizetype);
5421 offset_int idx = wi::sext (wi::to_offset (TREE_OPERAND (local->index, 0))
5422 - wi::to_offset (local->min_index), prec);
5423 tree valtype = TREE_TYPE (local->val);
5424 HOST_WIDE_INT fieldpos
5425 = (idx * wi::to_offset (TYPE_SIZE_UNIT (valtype))).to_short_addr ();
5426
5427 /* Advance to offset of this element. */
5428 if (fieldpos > local->total_bytes)
5429 {
5430 assemble_zeros (fieldpos - local->total_bytes);
5431 local->total_bytes = fieldpos;
5432 }
5433 else
5434 /* Must not go backwards. */
5435 gcc_assert (fieldpos == local->total_bytes);
5436
5437 unsigned HOST_WIDE_INT fieldsize
5438 = int_size_in_bytes (TREE_TYPE (local->type));
5439
5440 HOST_WIDE_INT lo_index
5441 = tree_to_shwi (TREE_OPERAND (local->index, 0));
5442 HOST_WIDE_INT hi_index
5443 = tree_to_shwi (TREE_OPERAND (local->index, 1));
5444 HOST_WIDE_INT index;
5445
5446 unsigned int align2
5447 = min_align (local->align, fieldsize * BITS_PER_UNIT);
5448
5449 for (index = lo_index; index <= hi_index; index++)
5450 {
5451 /* Output the element's initial value. */
5452 if (local->val == NULL_TREE)
5453 assemble_zeros (fieldsize);
5454 else
5455 fieldsize = output_constant (local->val, fieldsize, align2,
5456 local->reverse, false);
5457
5458 /* Count its size. */
5459 local->total_bytes += fieldsize;
5460 }
5461 }
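
/* Editorial illustration (hypothetical source, not part of varasm.c): a
   RANGE_EXPR index comes from a GNU C designated range initializer such as

       static int a[8] = { [2 ... 5] = 7 };

   Assuming 4-byte int, the helper above first pads to byte offset
   2 * 4 = 8 with assemble_zeros, then emits the value once per index in
   the range (four 4-byte integers), and output_constructor later pads the
   remaining elements a[6] and a[7] with zeros. */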
5462
5463 /* Helper for output_constructor. From the current LOCAL state, output a
5464 field element that is not a true bitfield or part of an outer one. */
5465
5466 static void
5467 output_constructor_regular_field (oc_local_state *local)
5468 {
5469 /* Field size and position. Since this structure is static, we know the
5470 positions are constant. */
5471 unsigned HOST_WIDE_INT fieldsize;
5472 HOST_WIDE_INT fieldpos;
5473
5474 unsigned int align2;
5475
5476 /* Output any buffered-up bit-fields preceding this element. */
5477 if (local->byte_buffer_in_use)
5478 {
5479 assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
5480 local->total_bytes++;
5481 local->byte_buffer_in_use = false;
5482 }
5483
5484 if (local->index != NULL_TREE)
5485 {
5486 /* Perform the index calculation in modulo arithmetic but
5487 sign-extend the result because Ada has negative DECL_FIELD_OFFSETs
5488 but we are using an unsigned sizetype. */
5489 unsigned prec = TYPE_PRECISION (sizetype);
5490 offset_int idx = wi::sext (wi::to_offset (local->index)
5491 - wi::to_offset (local->min_index), prec);
5492 fieldpos = (idx * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (local->val))))
5493 .to_short_addr ();
5494 }
5495 else if (local->field != NULL_TREE)
5496 fieldpos = int_byte_position (local->field);
5497 else
5498 fieldpos = 0;
5499
5500 /* Advance to offset of this element.
5501 Note no alignment needed in an array, since that is guaranteed
5502 if each element has the proper size. */
5503 if (local->field != NULL_TREE || local->index != NULL_TREE)
5504 {
5505 if (fieldpos > local->total_bytes)
5506 {
5507 assemble_zeros (fieldpos - local->total_bytes);
5508 local->total_bytes = fieldpos;
5509 }
5510 else
5511 /* Must not go backwards. */
5512 gcc_assert (fieldpos == local->total_bytes);
5513 }
5514
5515 /* Find the alignment of this element. */
5516 align2 = min_align (local->align, BITS_PER_UNIT * fieldpos);
5517
5518 /* Determine size this element should occupy. */
5519 if (local->field)
5520 {
5521 fieldsize = 0;
5522
5523 /* If this is an array with an unspecified upper bound,
5524 the initializer determines the size. */
5525 /* ??? This ought only to be checked if DECL_SIZE_UNIT is NULL,
5526 but we cannot do this until the deprecated support for
5527 initializing zero-length array members is removed. */
5528 if (TREE_CODE (TREE_TYPE (local->field)) == ARRAY_TYPE
5529 && (!TYPE_DOMAIN (TREE_TYPE (local->field))
5530 || !TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (local->field)))))
5531 {
5532 unsigned HOST_WIDE_INT fldsize
5533 = array_size_for_constructor (local->val);
5534 fieldsize = int_size_in_bytes (TREE_TYPE (local->val));
5535 /* In most cases fieldsize == fldsize as the size of the initializer
5536 determines how many elements the flexible array member has. For
5537 C++, fldsize can be smaller though, if the last one, several, or all
5538 of the flexible array member's initializers have side effects and the
5539 front end splits them out into dynamic initialization. */
5540 gcc_checking_assert (fieldsize >= fldsize);
5541 /* Given a non-empty initialization, this field had better
5542 be last. Given a flexible array member, the next field
5543 on the chain is a TYPE_DECL of the enclosing struct. */
5544 const_tree next = DECL_CHAIN (local->field);
5545 gcc_assert (!fieldsize || !next || TREE_CODE (next) != FIELD_DECL);
5546 }
5547 else
5548 fieldsize = tree_to_uhwi (DECL_SIZE_UNIT (local->field));
5549 }
5550 else
5551 fieldsize = int_size_in_bytes (TREE_TYPE (local->type));
5552
5553 /* Output the element's initial value. */
5554 if (local->val == NULL_TREE)
5555 assemble_zeros (fieldsize);
5556 else
5557 fieldsize = output_constant (local->val, fieldsize, align2,
5558 local->reverse, false);
5559
5560 /* Count its size. */
5561 local->total_bytes += fieldsize;
5562 }
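
/* Editorial illustration (hypothetical source, not part of varasm.c): for a
   plain struct constructor with internal padding, e.g.

       struct pair { char tag; int val; };
       static struct pair p = { 'x', 42 };

   and assuming a 4-byte, 4-byte-aligned int, the helper above emits the
   'tag' byte at offset 0, advances to int_byte_position (val) == 4 with
   assemble_zeros (3), and then emits a 4-byte integer for 'val', giving
   roughly

       p:
               .byte 120
               .zero 3
               .long 42
   */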
5563
5564 /* Helper for output_constructor. From the LOCAL state, output an element
5565 that is a true bitfield or part of an outer one. BIT_OFFSET is the offset
5566 from the start of a possibly ongoing outer byte buffer. */
5567
5568 static void
5569 output_constructor_bitfield (oc_local_state *local, unsigned int bit_offset)
5570 {
5571 /* Bit size of this element. */
5572 HOST_WIDE_INT ebitsize
5573 = (local->field
5574 ? tree_to_uhwi (DECL_SIZE (local->field))
5575 : tree_to_uhwi (TYPE_SIZE (TREE_TYPE (local->type))));
5576
5577 /* Relative index of this element if this is an array component. */
5578 HOST_WIDE_INT relative_index
5579 = (!local->field
5580 ? (local->index
5581 ? (tree_to_shwi (local->index)
5582 - tree_to_shwi (local->min_index))
5583 : local->last_relative_index + 1)
5584 : 0);
5585
5586 /* Bit position of this element from the start of the containing
5587 constructor. */
5588 HOST_WIDE_INT constructor_relative_ebitpos
5589 = (local->field
5590 ? int_bit_position (local->field)
5591 : ebitsize * relative_index);
5592
5593 /* Bit position of this element from the start of a possibly ongoing
5594 outer byte buffer. */
5595 HOST_WIDE_INT byte_relative_ebitpos
5596 = bit_offset + constructor_relative_ebitpos;
5597
5598 /* From the start of a possibly ongoing outer byte buffer, offsets to
5599 the first bit of this element and to the first bit past the end of
5600 this element. */
5601 HOST_WIDE_INT next_offset = byte_relative_ebitpos;
5602 HOST_WIDE_INT end_offset = byte_relative_ebitpos + ebitsize;
5603
5604 local->last_relative_index = relative_index;
5605
5606 if (local->val == NULL_TREE)
5607 local->val = integer_zero_node;
5608
5609 while (TREE_CODE (local->val) == VIEW_CONVERT_EXPR
5610 || TREE_CODE (local->val) == NON_LVALUE_EXPR)
5611 local->val = TREE_OPERAND (local->val, 0);
5612
5613 if (TREE_CODE (local->val) != INTEGER_CST
5614 && TREE_CODE (local->val) != CONSTRUCTOR)
5615 {
5616 error ("invalid initial value for member %qE", DECL_NAME (local->field));
5617 return;
5618 }
5619
5620 /* If this field does not start in this (or next) byte, skip some bytes. */
5621 if (next_offset / BITS_PER_UNIT != local->total_bytes)
5622 {
5623 /* Output remnant of any bit field in previous bytes. */
5624 if (local->byte_buffer_in_use)
5625 {
5626 assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
5627 local->total_bytes++;
5628 local->byte_buffer_in_use = false;
5629 }
5630
5631 /* If still not at proper byte, advance to there. */
5632 if (next_offset / BITS_PER_UNIT != local->total_bytes)
5633 {
5634 gcc_assert (next_offset / BITS_PER_UNIT >= local->total_bytes);
5635 assemble_zeros (next_offset / BITS_PER_UNIT - local->total_bytes);
5636 local->total_bytes = next_offset / BITS_PER_UNIT;
5637 }
5638 }
5639
5640 /* Set up the buffer if necessary. */
5641 if (!local->byte_buffer_in_use)
5642 {
5643 local->byte = 0;
5644 if (ebitsize > 0)
5645 local->byte_buffer_in_use = true;
5646 }
5647
5648 /* If this is a nested constructor, recurse, passing the bit offset and the
5649 pending data, then retrieve the new pending data afterwards. */
5650 if (TREE_CODE (local->val) == CONSTRUCTOR)
5651 {
5652 oc_outer_state temp_state;
5653 temp_state.bit_offset = next_offset % BITS_PER_UNIT;
5654 temp_state.byte = local->byte;
5655 local->total_bytes
5656 += output_constructor (local->val, 0, 0, local->reverse, &temp_state);
5657 local->byte = temp_state.byte;
5658 return;
5659 }
5660
5661 /* Otherwise, we must split the element into pieces that fall within
5662 separate bytes, and combine each byte with previous or following
5663 bit-fields. */
5664 while (next_offset < end_offset)
5665 {
5666 int this_time;
5667 int shift;
5668 unsigned HOST_WIDE_INT value;
5669 HOST_WIDE_INT next_byte = next_offset / BITS_PER_UNIT;
5670 HOST_WIDE_INT next_bit = next_offset % BITS_PER_UNIT;
5671
5672 /* Advance from byte to byte within this element when necessary. */
5673 while (next_byte != local->total_bytes)
5674 {
5675 assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
5676 local->total_bytes++;
5677 local->byte = 0;
5678 }
5679
5680 /* Number of bits we can process at once (all part of the same byte). */
5681 this_time = MIN (end_offset - next_offset, BITS_PER_UNIT - next_bit);
5682 if (local->reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5683 {
5684 /* For big-endian data, take the most significant bits (of the
5685 bits that are significant) first and put them into bytes from
5686 the most significant end. */
5687 shift = end_offset - next_offset - this_time;
5688
5689 /* Don't try to take a bunch of bits that cross
5690 the word boundary in the INTEGER_CST. We can
5691 only select bits from one element. */
5692 if ((shift / HOST_BITS_PER_WIDE_INT)
5693 != ((shift + this_time - 1) / HOST_BITS_PER_WIDE_INT))
5694 {
5695 const int end = shift + this_time - 1;
5696 shift = end & -HOST_BITS_PER_WIDE_INT;
5697 this_time = end - shift + 1;
5698 }
5699
5700 /* Now get the bits we want to insert. */
5701 value = wi::extract_uhwi (wi::to_widest (local->val),
5702 shift, this_time);
5703
5704 /* Get the result. This works only when:
5705 1 <= this_time <= HOST_BITS_PER_WIDE_INT. */
5706 local->byte |= value << (BITS_PER_UNIT - this_time - next_bit);
5707 }
5708 else
5709 {
5710 /* On little-endian machines, take the least significant bits of
5711 the value first and pack them starting at the least significant
5712 bits of the bytes. */
5713 shift = next_offset - byte_relative_ebitpos;
5714
5715 /* Don't try to take a bunch of bits that cross
5716 the word boundary in the INTEGER_CST. We can
5717 only select bits from one element. */
5718 if ((shift / HOST_BITS_PER_WIDE_INT)
5719 != ((shift + this_time - 1) / HOST_BITS_PER_WIDE_INT))
5720 this_time
5721 = HOST_BITS_PER_WIDE_INT - (shift & (HOST_BITS_PER_WIDE_INT - 1));
5722
5723 /* Now get the bits we want to insert. */
5724 value = wi::extract_uhwi (wi::to_widest (local->val),
5725 shift, this_time);
5726
5727 /* Get the result. This works only when:
5728 1 <= this_time <= HOST_BITS_PER_WIDE_INT. */
5729 local->byte |= value << next_bit;
5730 }
5731
5732 next_offset += this_time;
5733 local->byte_buffer_in_use = true;
5734 }
5735 }
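
/* Editorial illustration, a worked example (hypothetical source, not part
   of varasm.c): for

       struct flags { unsigned a : 3; unsigned b : 5; };
       static struct flags f = { 5, 9 };

   on a little-endian target the helper above packs 'a' into bits 0-2 and
   'b' into bits 3-7 of the byte buffer, i.e. byte = 5 | (9 << 3) = 77, so
   a single ".byte 77" is emitted and output_constructor then pads the
   4-byte struct with ".zero 3". */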
5736
5737 /* Subroutine of output_constant, used for CONSTRUCTORs (aggregate constants).
5738 Generate at least SIZE bytes, padding if necessary. OUTER designates the
5739 caller output state of relevance in recursive invocations. */
5740
5741 static unsigned HOST_WIDE_INT
5742 output_constructor (tree exp, unsigned HOST_WIDE_INT size, unsigned int align,
5743 bool reverse, oc_outer_state *outer)
5744 {
5745 unsigned HOST_WIDE_INT cnt;
5746 constructor_elt *ce;
5747 oc_local_state local;
5748
5749 /* Setup our local state to communicate with helpers. */
5750 local.exp = exp;
5751 local.type = TREE_TYPE (exp);
5752 local.size = size;
5753 local.align = align;
5754 if (TREE_CODE (local.type) == ARRAY_TYPE && TYPE_DOMAIN (local.type))
5755 local.min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (local.type));
5756 else
5757 local.min_index = integer_zero_node;
5758
5759 local.total_bytes = 0;
5760 local.byte_buffer_in_use = outer != NULL;
5761 local.byte = outer ? outer->byte : 0;
5762 local.last_relative_index = -1;
5763 /* The storage order is specified for every aggregate type. */
5764 if (AGGREGATE_TYPE_P (local.type))
5765 local.reverse = TYPE_REVERSE_STORAGE_ORDER (local.type);
5766 else
5767 local.reverse = reverse;
5768
5769 gcc_assert (HOST_BITS_PER_WIDE_INT >= BITS_PER_UNIT);
5770
5771 /* As CE goes through the elements of the constant, FIELD goes through the
5772 structure fields if the constant is a structure. If the constant is a
5773 union, we override this by getting the field from the TREE_LIST element.
5774 But the constant could also be an array. Then FIELD is zero.
5775
5776 There is always a maximum of one element in the chain LINK for unions
5777 (even if the initializer in a source program incorrectly contains
5778 more than one). */
5779
5780 if (TREE_CODE (local.type) == RECORD_TYPE)
5781 local.field = TYPE_FIELDS (local.type);
5782 else
5783 local.field = NULL_TREE;
5784
5785 for (cnt = 0;
5786 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), cnt, &ce);
5787 cnt++, local.field = local.field ? DECL_CHAIN (local.field) : 0)
5788 {
5789 local.val = ce->value;
5790 local.index = NULL_TREE;
5791
5792 /* The element in a union constructor specifies the proper field
5793 or index. */
5794 if (RECORD_OR_UNION_TYPE_P (local.type) && ce->index != NULL_TREE)
5795 local.field = ce->index;
5796
5797 else if (TREE_CODE (local.type) == ARRAY_TYPE)
5798 local.index = ce->index;
5799
5800 if (local.field && flag_verbose_asm)
5801 fprintf (asm_out_file, "%s %s:\n",
5802 ASM_COMMENT_START,
5803 DECL_NAME (local.field)
5804 ? IDENTIFIER_POINTER (DECL_NAME (local.field))
5805 : "<anonymous>");
5806
5807 /* Eliminate the marker that makes a cast not be an lvalue. */
5808 if (local.val != NULL_TREE)
5809 STRIP_NOPS (local.val);
5810
5811 /* Output the current element, using the appropriate helper ... */
5812
5813 /* For an array slice not part of an outer bitfield. */
5814 if (!outer
5815 && local.index != NULL_TREE
5816 && TREE_CODE (local.index) == RANGE_EXPR)
5817 output_constructor_array_range (&local);
5818
5819 /* For a field that is neither a true bitfield nor part of an outer one,
5820 known to be at least byte aligned and multiple-of-bytes long. */
5821 else if (!outer
5822 && (local.field == NULL_TREE
5823 || !CONSTRUCTOR_BITFIELD_P (local.field)))
5824 output_constructor_regular_field (&local);
5825
5826 /* For a true bitfield or part of an outer one. Only INTEGER_CSTs are
5827 supported for scalar fields, so we may need to convert first. */
5828 else
5829 {
5830 if (TREE_CODE (local.val) == REAL_CST)
5831 local.val
5832 = fold_unary (VIEW_CONVERT_EXPR,
5833 build_nonstandard_integer_type
5834 (TYPE_PRECISION (TREE_TYPE (local.val)), 0),
5835 local.val);
5836 output_constructor_bitfield (&local, outer ? outer->bit_offset : 0);
5837 }
5838 }
5839
5840 /* If we are not at toplevel, save the pending data for our caller.
5841 Otherwise output the pending data and padding zeros as needed. */
5842 if (outer)
5843 outer->byte = local.byte;
5844 else
5845 {
5846 if (local.byte_buffer_in_use)
5847 {
5848 assemble_integer (GEN_INT (local.byte), 1, BITS_PER_UNIT, 1);
5849 local.total_bytes++;
5850 }
5851
5852 if ((unsigned HOST_WIDE_INT)local.total_bytes < local.size)
5853 {
5854 assemble_zeros (local.size - local.total_bytes);
5855 local.total_bytes = local.size;
5856 }
5857 }
5858
5859 return local.total_bytes;
5860 }
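
/* Editorial illustration (hypothetical source, not part of varasm.c): the
   'reverse' handling above is what serves the scalar_storage_order
   extension for static constructors. On a little-endian target,

       struct __attribute__ ((scalar_storage_order ("big-endian"))) be
         { int x; };
       static struct be v = { 1 };

   must store the bytes 00 00 00 01; flip_storage_order turns the constant
   into 0x01000000, which assemble_integer then emits, e.g. as
   ".long 16777216" on x86-64. */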
5861
5862 /* Mark DECL as weak. */
5863
5864 static void
5865 mark_weak (tree decl)
5866 {
5867 if (DECL_WEAK (decl))
5868 return;
5869
5870 struct symtab_node *n = symtab_node::get (decl);
5871 if (n && n->refuse_visibility_changes)
5872 error ("%qD declared weak after being used", decl);
5873 DECL_WEAK (decl) = 1;
5874
5875 if (DECL_RTL_SET_P (decl)
5876 && MEM_P (DECL_RTL (decl))
5877 && XEXP (DECL_RTL (decl), 0)
5878 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == SYMBOL_REF)
5879 SYMBOL_REF_WEAK (XEXP (DECL_RTL (decl), 0)) = 1;
5880 }
5881
5882 /* Merge weak status between NEWDECL and OLDDECL. */
5883
5884 void
5885 merge_weak (tree newdecl, tree olddecl)
5886 {
5887 if (DECL_WEAK (newdecl) == DECL_WEAK (olddecl))
5888 {
5889 if (DECL_WEAK (newdecl) && TARGET_SUPPORTS_WEAK)
5890 {
5891 tree *pwd;
5892 /* We put the NEWDECL on the weak_decls list at some point
5893 and OLDDECL as well. Keep just OLDDECL on the list. */
5894 for (pwd = &weak_decls; *pwd; pwd = &TREE_CHAIN (*pwd))
5895 if (TREE_VALUE (*pwd) == newdecl)
5896 {
5897 *pwd = TREE_CHAIN (*pwd);
5898 break;
5899 }
5900 }
5901 return;
5902 }
5903
5904 if (DECL_WEAK (newdecl))
5905 {
5906 tree wd;
5907
5908 /* NEWDECL is weak, but OLDDECL is not. */
5909
5910 /* If we already output the OLDDECL, we're in trouble; we can't
5911 go back and make it weak. This should never happen in
5912 unit-at-a-time compilation. */
5913 gcc_assert (!TREE_ASM_WRITTEN (olddecl));
5914
5915 /* If we've already generated rtl referencing OLDDECL, we may
5916 have done so in a way that will not function properly with
5917 a weak symbol. Again in unit-at-a-time this should be
5918 impossible. */
5919 gcc_assert (!TREE_USED (olddecl)
5920 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (olddecl)));
5921
5922 /* PR 49899: You cannot convert a static function into a weak, public function. */
5923 if (! TREE_PUBLIC (olddecl) && TREE_PUBLIC (newdecl))
5924 error ("weak declaration of %q+D being applied to an already "
5925 "existing static definition", newdecl);
5926
5927 if (TARGET_SUPPORTS_WEAK)
5928 {
5929 /* We put the NEWDECL on the weak_decls list at some point.
5930 Replace it with the OLDDECL. */
5931 for (wd = weak_decls; wd; wd = TREE_CHAIN (wd))
5932 if (TREE_VALUE (wd) == newdecl)
5933 {
5934 TREE_VALUE (wd) = olddecl;
5935 break;
5936 }
5937 /* We may not find the entry on the list. If NEWDECL is a
5938 weak alias, then we will have already called
5939 globalize_decl to remove the entry; in that case, we do
5940 not need to do anything. */
5941 }
5942
5943 /* Make the OLDDECL weak; it's OLDDECL that we'll be keeping. */
5944 mark_weak (olddecl);
5945 }
5946 else
5947 /* OLDDECL was weak, but NEWDECL was not explicitly marked as
5948 weak. Just update NEWDECL to indicate that it's weak too. */
5949 mark_weak (newdecl);
5950 }
5951
5952 /* Declare DECL to be a weak symbol. */
5953
5954 void
5955 declare_weak (tree decl)
5956 {
5957 /* With -fsyntax-only, TREE_ASM_WRITTEN might be set on certain function
5958 decls earlier than normal, but since with -fsyntax-only nothing is
5959 actually emitted, there is no harm in marking it weak later. */
5960 gcc_assert (TREE_CODE (decl) != FUNCTION_DECL
5961 || !TREE_ASM_WRITTEN (decl)
5962 || flag_syntax_only);
5963 if (! TREE_PUBLIC (decl))
5964 {
5965 error ("weak declaration of %q+D must be public", decl);
5966 return;
5967 }
5968 else if (!TARGET_SUPPORTS_WEAK)
5969 warning (0, "weak declaration of %q+D not supported", decl);
5970
5971 mark_weak (decl);
5972 if (!lookup_attribute ("weak", DECL_ATTRIBUTES (decl)))
5973 DECL_ATTRIBUTES (decl)
5974 = tree_cons (get_identifier ("weak"), NULL, DECL_ATTRIBUTES (decl));
5975 }
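
/* Editorial illustration (hypothetical source, not part of varasm.c): a
   declaration such as

       void hook (void) __attribute__ ((weak));

   reaches declare_weak from the front end; on an ELF target the symbol
   eventually gets a ".weak hook" directive (see weak_finish below), and an
   unresolved reference to 'hook' then resolves to zero at link time
   instead of causing an undefined-symbol error. */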
5976
5977 static void
5978 weak_finish_1 (tree decl)
5979 {
5980 #if defined (ASM_WEAKEN_DECL) || defined (ASM_WEAKEN_LABEL)
5981 const char *const name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
5982 #endif
5983
5984 if (! TREE_USED (decl))
5985 return;
5986
5987 #ifdef ASM_WEAKEN_DECL
5988 ASM_WEAKEN_DECL (asm_out_file, decl, name, NULL);
5989 #else
5990 #ifdef ASM_WEAKEN_LABEL
5991 ASM_WEAKEN_LABEL (asm_out_file, name);
5992 #else
5993 #ifdef ASM_OUTPUT_WEAK_ALIAS
5994 {
5995 static bool warn_once = 0;
5996 if (! warn_once)
5997 {
5998 warning (0, "only weak aliases are supported in this configuration");
5999 warn_once = 1;
6000 }
6001 return;
6002 }
6003 #endif
6004 #endif
6005 #endif
6006 }
6007
6008 /* Given an assembler name, find the decl it is associated with. */
6009 static tree
6010 find_decl (tree target)
6011 {
6012 symtab_node *node = symtab_node::get_for_asmname (target);
6013 if (node)
6014 return node->decl;
6015 return NULL_TREE;
6016 }
6017
6018 /* This TREE_LIST contains weakref targets. */
6019
6020 static GTY(()) tree weakref_targets;
6021
6022 /* Emit any pending weak declarations. */
6023
6024 void
6025 weak_finish (void)
6026 {
6027 tree t;
6028
6029 for (t = weakref_targets; t; t = TREE_CHAIN (t))
6030 {
6031 tree alias_decl = TREE_PURPOSE (t);
6032 tree target = ultimate_transparent_alias_target (&TREE_VALUE (t));
6033
6034 if (! TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (alias_decl))
6035 || TREE_SYMBOL_REFERENCED (target))
6036 /* Remove alias_decl from the weak list, but leave entries for
6037 the target alone. */
6038 target = NULL_TREE;
6039 #ifndef ASM_OUTPUT_WEAKREF
6040 else if (! TREE_SYMBOL_REFERENCED (target))
6041 {
6042 /* Use ASM_WEAKEN_LABEL only if ASM_WEAKEN_DECL is not
6043 defined, otherwise we and weak_finish_1 would use
6044 different macros. */
6045 # if defined ASM_WEAKEN_LABEL && ! defined ASM_WEAKEN_DECL
6046 ASM_WEAKEN_LABEL (asm_out_file, IDENTIFIER_POINTER (target));
6047 # else
6048 tree decl = find_decl (target);
6049
6050 if (! decl)
6051 {
6052 decl = build_decl (DECL_SOURCE_LOCATION (alias_decl),
6053 TREE_CODE (alias_decl), target,
6054 TREE_TYPE (alias_decl));
6055
6056 DECL_EXTERNAL (decl) = 1;
6057 TREE_PUBLIC (decl) = 1;
6058 DECL_ARTIFICIAL (decl) = 1;
6059 TREE_NOTHROW (decl) = TREE_NOTHROW (alias_decl);
6060 TREE_USED (decl) = 1;
6061 }
6062
6063 weak_finish_1 (decl);
6064 # endif
6065 }
6066 #endif
6067
6068 {
6069 tree *p;
6070 tree t2;
6071
6072 /* Remove the alias and the target from the pending weak list
6073 so that we do not emit any .weak directives for the former,
6074 nor multiple .weak directives for the latter. */
6075 for (p = &weak_decls; (t2 = *p) ; )
6076 {
6077 if (TREE_VALUE (t2) == alias_decl
6078 || target == DECL_ASSEMBLER_NAME (TREE_VALUE (t2)))
6079 *p = TREE_CHAIN (t2);
6080 else
6081 p = &TREE_CHAIN (t2);
6082 }
6083
6084 /* Remove other weakrefs to the same target, to speed things up. */
6085 for (p = &TREE_CHAIN (t); (t2 = *p) ; )
6086 {
6087 if (target == ultimate_transparent_alias_target (&TREE_VALUE (t2)))
6088 *p = TREE_CHAIN (t2);
6089 else
6090 p = &TREE_CHAIN (t2);
6091 }
6092 }
6093 }
6094
6095 for (t = weak_decls; t; t = TREE_CHAIN (t))
6096 {
6097 tree decl = TREE_VALUE (t);
6098
6099 weak_finish_1 (decl);
6100 }
6101 }
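
/* Editorial illustration (hypothetical source, not part of varasm.c):
   weakref_targets is fed by do_assemble_alias below for declarations like

       static void do_work (void) __attribute__ ((weakref ("worker")));

   With GAS this is normally emitted directly as ".weakref do_work,worker"
   via ASM_OUTPUT_WEAKREF; the fallback code above only needs to weaken the
   target itself when that macro is not available. */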
6102
6103 /* Emit the assembly bits to indicate that DECL is globally visible. */
6104
6105 static void
6106 globalize_decl (tree decl)
6107 {
6108
6109 #if defined (ASM_WEAKEN_LABEL) || defined (ASM_WEAKEN_DECL)
6110 if (DECL_WEAK (decl))
6111 {
6112 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
6113 tree *p, t;
6114
6115 #ifdef ASM_WEAKEN_DECL
6116 ASM_WEAKEN_DECL (asm_out_file, decl, name, 0);
6117 #else
6118 ASM_WEAKEN_LABEL (asm_out_file, name);
6119 #endif
6120
6121 /* Remove this function from the pending weak list so that
6122 we do not emit multiple .weak directives for it. */
6123 for (p = &weak_decls; (t = *p) ; )
6124 {
6125 if (DECL_ASSEMBLER_NAME (decl) == DECL_ASSEMBLER_NAME (TREE_VALUE (t)))
6126 *p = TREE_CHAIN (t);
6127 else
6128 p = &TREE_CHAIN (t);
6129 }
6130
6131 /* Remove weakrefs to the same target from the pending weakref
6132 list, for the same reason. */
6133 for (p = &weakref_targets; (t = *p) ; )
6134 {
6135 if (DECL_ASSEMBLER_NAME (decl)
6136 == ultimate_transparent_alias_target (&TREE_VALUE (t)))
6137 *p = TREE_CHAIN (t);
6138 else
6139 p = &TREE_CHAIN (t);
6140 }
6141
6142 return;
6143 }
6144 #endif
6145
6146 targetm.asm_out.globalize_decl_name (asm_out_file, decl);
6147 }
6148
6149 vec<alias_pair, va_gc> *alias_pairs;
6150
6151 /* Output the assembler code for a define (equate) using ASM_OUTPUT_DEF
6152 or ASM_OUTPUT_DEF_FROM_DECLS. The function defines the symbol whose
6153 tree node is DECL to have the value of the tree node TARGET. */
6154
6155 void
6156 do_assemble_alias (tree decl, tree target)
6157 {
6158 tree id;
6159
6160 /* Emulated TLS had better not get this var. */
6161 gcc_assert (!(!targetm.have_tls
6162 && VAR_P (decl)
6163 && DECL_THREAD_LOCAL_P (decl)));
6164
6165 if (TREE_ASM_WRITTEN (decl))
6166 return;
6167
6168 id = DECL_ASSEMBLER_NAME (decl);
6169 ultimate_transparent_alias_target (&id);
6170 ultimate_transparent_alias_target (&target);
6171
6172 /* We must force creation of DECL_RTL for debug info generation, even though
6173 we don't use it here. */
6174 make_decl_rtl (decl);
6175
6176 TREE_ASM_WRITTEN (decl) = 1;
6177 TREE_ASM_WRITTEN (DECL_ASSEMBLER_NAME (decl)) = 1;
6178 TREE_ASM_WRITTEN (id) = 1;
6179
6180 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
6181 {
6182 if (!TREE_SYMBOL_REFERENCED (target))
6183 weakref_targets = tree_cons (decl, target, weakref_targets);
6184
6185 #ifdef ASM_OUTPUT_WEAKREF
6186 ASM_OUTPUT_WEAKREF (asm_out_file, decl,
6187 IDENTIFIER_POINTER (id),
6188 IDENTIFIER_POINTER (target));
6189 #else
6190 if (!TARGET_SUPPORTS_WEAK)
6191 {
6192 error_at (DECL_SOURCE_LOCATION (decl),
6193 "weakref is not supported in this configuration");
6194 return;
6195 }
6196 #endif
6197 return;
6198 }
6199
6200 #ifdef ASM_OUTPUT_DEF
6201 tree orig_decl = decl;
6202
6203 /* Make name accessible from other files, if appropriate. */
6204
6205 if (TREE_PUBLIC (decl) || TREE_PUBLIC (orig_decl))
6206 {
6207 globalize_decl (decl);
6208 maybe_assemble_visibility (decl);
6209 }
6210 if (TREE_CODE (decl) == FUNCTION_DECL
6211 && cgraph_node::get (decl)->ifunc_resolver)
6212 {
6213 #if defined (ASM_OUTPUT_TYPE_DIRECTIVE)
6214 if (targetm.has_ifunc_p ())
6215 ASM_OUTPUT_TYPE_DIRECTIVE
6216 (asm_out_file, IDENTIFIER_POINTER (id),
6217 IFUNC_ASM_TYPE);
6218 else
6219 #endif
6220 error_at (DECL_SOURCE_LOCATION (decl),
6221 "%qs is not supported on this target", "ifunc");
6222 }
6223
6224 # ifdef ASM_OUTPUT_DEF_FROM_DECLS
6225 ASM_OUTPUT_DEF_FROM_DECLS (asm_out_file, decl, target);
6226 # else
6227 ASM_OUTPUT_DEF (asm_out_file,
6228 IDENTIFIER_POINTER (id),
6229 IDENTIFIER_POINTER (target));
6230 # endif
6231 #elif defined (ASM_OUTPUT_WEAK_ALIAS) || defined (ASM_WEAKEN_DECL)
6232 {
6233 const char *name;
6234 tree *p, t;
6235
6236 name = IDENTIFIER_POINTER (id);
6237 # ifdef ASM_WEAKEN_DECL
6238 ASM_WEAKEN_DECL (asm_out_file, decl, name, IDENTIFIER_POINTER (target));
6239 # else
6240 ASM_OUTPUT_WEAK_ALIAS (asm_out_file, name, IDENTIFIER_POINTER (target));
6241 # endif
6242 /* Remove this function from the pending weak list so that
6243 we do not emit multiple .weak directives for it. */
6244 for (p = &weak_decls; (t = *p) ; )
6245 if (DECL_ASSEMBLER_NAME (decl) == DECL_ASSEMBLER_NAME (TREE_VALUE (t))
6246 || id == DECL_ASSEMBLER_NAME (TREE_VALUE (t)))
6247 *p = TREE_CHAIN (t);
6248 else
6249 p = &TREE_CHAIN (t);
6250
6251 /* Remove weakrefs to the same target from the pending weakref
6252 list, for the same reason. */
6253 for (p = &weakref_targets; (t = *p) ; )
6254 {
6255 if (id == ultimate_transparent_alias_target (&TREE_VALUE (t)))
6256 *p = TREE_CHAIN (t);
6257 else
6258 p = &TREE_CHAIN (t);
6259 }
6260 }
6261 #endif
6262 }
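
/* Editorial illustration (hypothetical source, not part of varasm.c): for
   an ordinary alias such as

       void impl (void) { }
       void api (void) __attribute__ ((alias ("impl")));

   an ELF target that defines ASM_OUTPUT_DEF takes the first branch above
   and emits roughly

       .globl  api
       .set    api,impl

   where globalize_decl produces the .globl and ASM_OUTPUT_DEF the .set. */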
6263
6264 /* Output .symver directive. */
6265
6266 void
6267 do_assemble_symver (tree decl, tree target)
6268 {
6269 tree id = DECL_ASSEMBLER_NAME (decl);
6270 ultimate_transparent_alias_target (&id);
6271 ultimate_transparent_alias_target (&target);
6272 #ifdef ASM_OUTPUT_SYMVER_DIRECTIVE
6273 ASM_OUTPUT_SYMVER_DIRECTIVE (asm_out_file,
6274 IDENTIFIER_POINTER (target),
6275 IDENTIFIER_POINTER (id));
6276 #else
6277 error ("symver is only supported on ELF platforms");
6278 #endif
6279 }
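
/* Editorial illustration (hypothetical source, not part of varasm.c): the
   symver attribute ends up here; on an ELF/GAS target

       __attribute__ ((symver ("foo@VERS_1")))
       void foo_v1 (void) { }

   produces the GAS directive ".symver foo_v1, foo@VERS_1". */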
6280
6281 /* Emit an assembler directive to make the symbol for DECL an alias to
6282 the symbol for TARGET. */
6283
6284 void
6285 assemble_alias (tree decl, tree target)
6286 {
6287 tree target_decl;
6288
6289 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
6290 {
6291 tree alias = DECL_ASSEMBLER_NAME (decl);
6292
6293 ultimate_transparent_alias_target (&target);
6294
6295 if (alias == target)
6296 error ("%qs symbol %q+D ultimately targets itself", "weakref", decl);
6297 if (TREE_PUBLIC (decl))
6298 error ("%qs symbol %q+D must have static linkage", "weakref", decl);
6299 }
6300 else
6301 {
6302 #if !defined (ASM_OUTPUT_DEF)
6303 # if !defined(ASM_OUTPUT_WEAK_ALIAS) && !defined (ASM_WEAKEN_DECL)
6304 error_at (DECL_SOURCE_LOCATION (decl),
6305 "alias definitions not supported in this configuration");
6306 TREE_ASM_WRITTEN (decl) = 1;
6307 return;
6308 # else
6309 if (!DECL_WEAK (decl))
6310 {
6311 /* NB: ifunc_resolver isn't set when an error is detected. */
6312 if (TREE_CODE (decl) == FUNCTION_DECL
6313 && lookup_attribute ("ifunc", DECL_ATTRIBUTES (decl)))
6314 error_at (DECL_SOURCE_LOCATION (decl),
6315 "%qs is not supported in this configuration", "ifunc");
6316 else
6317 error_at (DECL_SOURCE_LOCATION (decl),
6318 "only weak aliases are supported in this configuration");
6319 TREE_ASM_WRITTEN (decl) = 1;
6320 return;
6321 }
6322 # endif
6323 #endif
6324 }
6325 TREE_USED (decl) = 1;
6326
6327 /* Allow aliases to aliases. */
6328 if (TREE_CODE (decl) == FUNCTION_DECL)
6329 cgraph_node::get_create (decl)->alias = true;
6330 else
6331 varpool_node::get_create (decl)->alias = true;
6332
6333 /* If the target has already been emitted, we don't have to queue the
6334 alias. This saves a tad of memory. */
6335 if (symtab->global_info_ready)
6336 target_decl = find_decl (target);
6337 else
6338 target_decl = NULL;
6339 if ((target_decl && TREE_ASM_WRITTEN (target_decl))
6340 || symtab->state >= EXPANSION)
6341 do_assemble_alias (decl, target);
6342 else
6343 {
6344 alias_pair p = {decl, target};
6345 vec_safe_push (alias_pairs, p);
6346 }
6347 }
6348
6349 /* Record and output a table of translations from original function
6350 to its transaction aware clone. Note that tm_pure functions are
6351 considered to be their own clone. */
6352
6353 struct tm_clone_hasher : ggc_cache_ptr_hash<tree_map>
6354 {
6355 static hashval_t hash (tree_map *m) { return tree_map_hash (m); }
6356 static bool equal (tree_map *a, tree_map *b) { return tree_map_eq (a, b); }
6357
6358 static int
6359 keep_cache_entry (tree_map *&e)
6360 {
6361 return ggc_marked_p (e->base.from);
6362 }
6363 };
6364
6365 static GTY((cache)) hash_table<tm_clone_hasher> *tm_clone_hash;
6366
6367 void
6368 record_tm_clone_pair (tree o, tree n)
6369 {
6370 struct tree_map **slot, *h;
6371
6372 if (tm_clone_hash == NULL)
6373 tm_clone_hash = hash_table<tm_clone_hasher>::create_ggc (32);
6374
6375 h = ggc_alloc<tree_map> ();
6376 h->hash = htab_hash_pointer (o);
6377 h->base.from = o;
6378 h->to = n;
6379
6380 slot = tm_clone_hash->find_slot_with_hash (h, h->hash, INSERT);
6381 *slot = h;
6382 }
6383
6384 tree
6385 get_tm_clone_pair (tree o)
6386 {
6387 if (tm_clone_hash)
6388 {
6389 struct tree_map *h, in;
6390
6391 in.base.from = o;
6392 in.hash = htab_hash_pointer (o);
6393 h = tm_clone_hash->find_with_hash (&in, in.hash);
6394 if (h)
6395 return h->to;
6396 }
6397 return NULL_TREE;
6398 }
6399
6400 struct tm_alias_pair
6401 {
6402 unsigned int uid;
6403 tree from;
6404 tree to;
6405 };
6406
6407
6408 /* Dump the actual pairs to the .tm_clone_table section. */
6409
6410 static void
6411 dump_tm_clone_pairs (vec<tm_alias_pair> tm_alias_pairs)
6412 {
6413 unsigned i;
6414 tm_alias_pair *p;
6415 bool switched = false;
6416
6417 FOR_EACH_VEC_ELT (tm_alias_pairs, i, p)
6418 {
6419 tree src = p->from;
6420 tree dst = p->to;
6421 struct cgraph_node *src_n = cgraph_node::get (src);
6422 struct cgraph_node *dst_n = cgraph_node::get (dst);
6423
6424 /* The function ipa_tm_create_version() marks the clone as needed if
6425 the original function was needed. But we also mark the clone as
6426 needed if we ever called the clone indirectly through
6427 TM_GETTMCLONE. If neither of these are true, we didn't generate
6428 a clone, and we didn't call it indirectly... no sense keeping it
6429 in the clone table. */
6430 if (!dst_n || !dst_n->definition)
6431 continue;
6432
6433 /* This covers the case where we have optimized the original
6434 function away, and only access the transactional clone. */
6435 if (!src_n || !src_n->definition)
6436 continue;
6437
6438 if (!switched)
6439 {
6440 switch_to_section (targetm.asm_out.tm_clone_table_section ());
6441 assemble_align (POINTER_SIZE);
6442 switched = true;
6443 }
6444
6445 assemble_integer (XEXP (DECL_RTL (src), 0),
6446 POINTER_SIZE_UNITS, POINTER_SIZE, 1);
6447 assemble_integer (XEXP (DECL_RTL (dst), 0),
6448 POINTER_SIZE_UNITS, POINTER_SIZE, 1);
6449 }
6450 }
6451
6452 /* Provide a default for the tm_clone_table section. */
6453
6454 section *
6455 default_clone_table_section (void)
6456 {
6457 return get_named_section (NULL, ".tm_clone_table", 3);
6458 }
6459
6460 /* Helper comparison function for qsorting tm_alias_pair entries by the
6461 DECL_UID stored in their UID field. */
6462
6463 static int
6464 tm_alias_pair_cmp (const void *x, const void *y)
6465 {
6466 const tm_alias_pair *p1 = (const tm_alias_pair *) x;
6467 const tm_alias_pair *p2 = (const tm_alias_pair *) y;
6468 if (p1->uid < p2->uid)
6469 return -1;
6470 if (p1->uid > p2->uid)
6471 return 1;
6472 return 0;
6473 }
6474
6475 void
6476 finish_tm_clone_pairs (void)
6477 {
6478 vec<tm_alias_pair> tm_alias_pairs = vNULL;
6479
6480 if (tm_clone_hash == NULL)
6481 return;
6482
6483 /* We need a deterministic order for the .tm_clone_table, otherwise
6484 we will get bootstrap comparison failures, so dump the hash table
6485 to a vector, sort it, and dump the vector. */
6486
6487 /* Dump the hashtable to a vector. */
6488 tree_map *map;
6489 hash_table<tm_clone_hasher>::iterator iter;
6490 FOR_EACH_HASH_TABLE_ELEMENT (*tm_clone_hash, map, tree_map *, iter)
6491 {
6492 tm_alias_pair p = {DECL_UID (map->base.from), map->base.from, map->to};
6493 tm_alias_pairs.safe_push (p);
6494 }
6495 /* Sort it. */
6496 tm_alias_pairs.qsort (tm_alias_pair_cmp);
6497
6498 /* Dump it. */
6499 dump_tm_clone_pairs (tm_alias_pairs);
6500
6501 tm_clone_hash->empty ();
6502 tm_clone_hash = NULL;
6503 tm_alias_pairs.release ();
6504 }
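
/* Editorial illustration (not part of varasm.c): with -fgnu-tm, each
   instrumented function gets a transactional clone, and dump_tm_clone_pairs
   above lays the table out as consecutive (original, clone) pointer pairs,
   e.g. on a 64-bit target roughly

       .quad   some_func
       .quad   <transactional clone of some_func>

   in the .tm_clone_table section, which the libitm runtime walks to map
   one onto the other. */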
6505
6506
6507 /* Emit an assembler directive to set the visibility of the symbol for
6508 DECL to the visibility type VIS, which must not be VISIBILITY_DEFAULT. */
6509
6510 void
6511 default_assemble_visibility (tree decl ATTRIBUTE_UNUSED,
6512 int vis ATTRIBUTE_UNUSED)
6513 {
6514 #ifdef HAVE_GAS_HIDDEN
6515 static const char * const visibility_types[] = {
6516 NULL, "protected", "hidden", "internal"
6517 };
6518
6519 const char *name, *type;
6520 tree id;
6521
6522 id = DECL_ASSEMBLER_NAME (decl);
6523 ultimate_transparent_alias_target (&id);
6524 name = IDENTIFIER_POINTER (id);
6525
6526 type = visibility_types[vis];
6527
6528 fprintf (asm_out_file, "\t.%s\t", type);
6529 assemble_name (asm_out_file, name);
6530 fprintf (asm_out_file, "\n");
6531 #else
6532 if (!DECL_ARTIFICIAL (decl))
6533 warning (OPT_Wattributes, "visibility attribute not supported "
6534 "in this configuration; ignored");
6535 #endif
6536 }
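
/* Editorial illustration (hypothetical source, not part of varasm.c): with
   a GAS that supports it (HAVE_GAS_HIDDEN), a definition such as

       int helper_val __attribute__ ((visibility ("hidden"))) = 1;

   makes maybe_assemble_visibility below invoke this hook with
   VISIBILITY_HIDDEN, producing ".hidden helper_val". */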
6537
6538 /* A helper function to call assemble_visibility when needed for a decl. */
6539
6540 int
6541 maybe_assemble_visibility (tree decl)
6542 {
6543 enum symbol_visibility vis = DECL_VISIBILITY (decl);
6544 if (vis != VISIBILITY_DEFAULT)
6545 {
6546 targetm.asm_out.assemble_visibility (decl, vis);
6547 return 1;
6548 }
6549 else
6550 return 0;
6551 }
6552
6553 /* Returns 1 if the target configuration supports defining public symbols
6554 so that one of them will be chosen at link time instead of generating a
6555 multiply-defined symbol error, whether through the use of weak symbols or
6556 a target-specific mechanism for having duplicates discarded. */
6557
6558 int
6559 supports_one_only (void)
6560 {
6561 if (SUPPORTS_ONE_ONLY)
6562 return 1;
6563 return TARGET_SUPPORTS_WEAK;
6564 }
6565
6566 /* Set up DECL as a public symbol that can be defined in multiple
6567 translation units without generating a linker error. */
6568
6569 void
6570 make_decl_one_only (tree decl, tree comdat_group)
6571 {
6572 struct symtab_node *symbol;
6573 gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));
6574
6575 TREE_PUBLIC (decl) = 1;
6576
6577 if (VAR_P (decl))
6578 symbol = varpool_node::get_create (decl);
6579 else
6580 symbol = cgraph_node::get_create (decl);
6581
6582 if (SUPPORTS_ONE_ONLY)
6583 {
6584 #ifdef MAKE_DECL_ONE_ONLY
6585 MAKE_DECL_ONE_ONLY (decl);
6586 #endif
6587 symbol->set_comdat_group (comdat_group);
6588 }
6589 else if (VAR_P (decl)
6590 && (DECL_INITIAL (decl) == 0
6591 || (!in_lto_p && DECL_INITIAL (decl) == error_mark_node)))
6592 DECL_COMMON (decl) = 1;
6593 else
6594 {
6595 gcc_assert (TARGET_SUPPORTS_WEAK);
6596 DECL_WEAK (decl) = 1;
6597 }
6598 }
6599
6600 void
6601 init_varasm_once (void)
6602 {
6603 section_htab = hash_table<section_hasher>::create_ggc (31);
6604 object_block_htab = hash_table<object_block_hasher>::create_ggc (31);
6605 const_desc_htab = hash_table<tree_descriptor_hasher>::create_ggc (1009);
6606
6607 shared_constant_pool = create_constant_pool ();
6608
6609 #ifdef TEXT_SECTION_ASM_OP
6610 text_section = get_unnamed_section (SECTION_CODE, output_section_asm_op,
6611 TEXT_SECTION_ASM_OP);
6612 #endif
6613
6614 #ifdef DATA_SECTION_ASM_OP
6615 data_section = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
6616 DATA_SECTION_ASM_OP);
6617 #endif
6618
6619 #ifdef SDATA_SECTION_ASM_OP
6620 sdata_section = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
6621 SDATA_SECTION_ASM_OP);
6622 #endif
6623
6624 #ifdef READONLY_DATA_SECTION_ASM_OP
6625 readonly_data_section = get_unnamed_section (0, output_section_asm_op,
6626 READONLY_DATA_SECTION_ASM_OP);
6627 #endif
6628
6629 #ifdef CTORS_SECTION_ASM_OP
6630 ctors_section = get_unnamed_section (0, output_section_asm_op,
6631 CTORS_SECTION_ASM_OP);
6632 #endif
6633
6634 #ifdef DTORS_SECTION_ASM_OP
6635 dtors_section = get_unnamed_section (0, output_section_asm_op,
6636 DTORS_SECTION_ASM_OP);
6637 #endif
6638
6639 #ifdef BSS_SECTION_ASM_OP
6640 bss_section = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
6641 output_section_asm_op,
6642 BSS_SECTION_ASM_OP);
6643 #endif
6644
6645 #ifdef SBSS_SECTION_ASM_OP
6646 sbss_section = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
6647 output_section_asm_op,
6648 SBSS_SECTION_ASM_OP);
6649 #endif
6650
6651 tls_comm_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS
6652 | SECTION_COMMON, emit_tls_common);
6653 lcomm_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS
6654 | SECTION_COMMON, emit_local);
6655 comm_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS
6656 | SECTION_COMMON, emit_common);
6657
6658 #if defined ASM_OUTPUT_ALIGNED_BSS
6659 bss_noswitch_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS,
6660 emit_bss);
6661 #endif
6662
6663 targetm.asm_out.init_sections ();
6664
6665 if (readonly_data_section == NULL)
6666 readonly_data_section = text_section;
6667
6668 #ifdef ASM_OUTPUT_EXTERNAL
6669 pending_assemble_externals_set = new hash_set<tree>;
6670 #endif
6671 }
6672
6673 enum tls_model
6674 decl_default_tls_model (const_tree decl)
6675 {
6676 enum tls_model kind;
6677 bool is_local;
6678
6679 is_local = targetm.binds_local_p (decl);
6680 if (!flag_shlib)
6681 {
6682 if (is_local)
6683 kind = TLS_MODEL_LOCAL_EXEC;
6684 else
6685 kind = TLS_MODEL_INITIAL_EXEC;
6686 }
6687
6688 /* Local dynamic is inefficient when we're not combining the
6689 parts of the address. */
6690 else if (optimize && is_local)
6691 kind = TLS_MODEL_LOCAL_DYNAMIC;
6692 else
6693 kind = TLS_MODEL_GLOBAL_DYNAMIC;
6694 if (kind < flag_tls_default)
6695 kind = flag_tls_default;
6696
6697 return kind;
6698 }
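
/* Editorial illustration (not part of varasm.c), a worked example of the
   selection above for a plain "__thread int counter;" on a typical ELF
   target, before the result is raised to any -ftls-model= setting:

       executable code (no -fpic), symbol binds locally       -> local-exec
       executable code (no -fpic), symbol may be preempted    -> initial-exec
       shared-library code (-fpic), optimizing, binds locally -> local-dynamic
       shared-library code (-fpic), otherwise                 -> global-dynamic
   */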
6699
6700 /* Select a set of attributes for section NAME based on the properties
6701 of DECL and whether or not RELOC indicates that DECL's initializer
6702 might contain runtime relocations.
6703
6704 We make the section read-only and executable for a function decl,
6705 read-only for a const data decl, and writable for a non-const data decl. */
6706
6707 unsigned int
6708 default_section_type_flags (tree decl, const char *name, int reloc)
6709 {
6710 unsigned int flags;
6711
6712 if (decl && TREE_CODE (decl) == FUNCTION_DECL)
6713 flags = SECTION_CODE;
6714 else if (decl)
6715 {
6716 enum section_category category
6717 = categorize_decl_for_section (decl, reloc);
6718 if (decl_readonly_section_1 (category))
6719 flags = 0;
6720 else if (category == SECCAT_DATA_REL_RO
6721 || category == SECCAT_DATA_REL_RO_LOCAL)
6722 flags = SECTION_WRITE | SECTION_RELRO;
6723 else
6724 flags = SECTION_WRITE;
6725 }
6726 else
6727 {
6728 flags = SECTION_WRITE;
6729 if (strcmp (name, ".data.rel.ro") == 0
6730 || strcmp (name, ".data.rel.ro.local") == 0)
6731 flags |= SECTION_RELRO;
6732 }
6733
6734 if (decl && DECL_P (decl) && DECL_COMDAT_GROUP (decl))
6735 flags |= SECTION_LINKONCE;
6736
6737 if (strcmp (name, ".vtable_map_vars") == 0)
6738 flags |= SECTION_LINKONCE;
6739
6740 if (decl && VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
6741 flags |= SECTION_TLS | SECTION_WRITE;
6742
6743 if (strcmp (name, ".bss") == 0
6744 || startswith (name, ".bss.")
6745 || startswith (name, ".gnu.linkonce.b.")
6746 || strcmp (name, ".persistent.bss") == 0
6747 || strcmp (name, ".sbss") == 0
6748 || startswith (name, ".sbss.")
6749 || startswith (name, ".gnu.linkonce.sb."))
6750 flags |= SECTION_BSS;
6751
6752 if (strcmp (name, ".tdata") == 0
6753 || startswith (name, ".tdata.")
6754 || startswith (name, ".gnu.linkonce.td."))
6755 flags |= SECTION_TLS;
6756
6757 if (strcmp (name, ".tbss") == 0
6758 || startswith (name, ".tbss.")
6759 || startswith (name, ".gnu.linkonce.tb."))
6760 flags |= SECTION_TLS | SECTION_BSS;
6761
6762 if (strcmp (name, ".noinit") == 0)
6763 flags |= SECTION_WRITE | SECTION_BSS | SECTION_NOTYPE;
6764
6765 if (strcmp (name, ".persistent") == 0)
6766 flags |= SECTION_WRITE | SECTION_NOTYPE;
6767
6768 /* Various sections have special ELF types that the assembler will
6769 assign by default based on the name. They are neither SHT_PROGBITS
6770 nor SHT_NOBITS, so when changing sections we don't want to print a
6771 section type (@progbits or @nobits). Rather than duplicating the
6772 assembler's knowledge of what those special name patterns are, just
6773 let the assembler choose the type if we don't know a specific
6774 reason to set it to something other than the default. SHT_PROGBITS
6775 is the default for sections whose name is not specially known to
6776 the assembler, so it does no harm to leave the choice to the
6777 assembler when @progbits is the best thing we know to use. If
6778 someone is silly enough to emit code or TLS variables to one of
6779 these sections, then don't handle them specially.
6780
6781 default_elf_asm_named_section (below) handles the BSS, TLS, ENTSIZE, and
6782 LINKONCE cases when NOTYPE is not set, so leave those to its logic. */
6783 if (!(flags & (SECTION_CODE | SECTION_BSS | SECTION_TLS | SECTION_ENTSIZE))
6784 && !(HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE)))
6785 flags |= SECTION_NOTYPE;
6786
6787 return flags;
6788 }
6789
6790 /* Return true if the target supports some form of global BSS,
6791 either through bss_noswitch_section, or by selecting a BSS
6792 section in TARGET_ASM_SELECT_SECTION. */
6793
6794 bool
6795 have_global_bss_p (void)
6796 {
6797 return bss_noswitch_section || targetm.have_switchable_bss_sections;
6798 }
6799
6800 /* Output assembly to switch to section NAME with attribute FLAGS.
6801 Four variants for common object file formats. */
6802
6803 void
6804 default_no_named_section (const char *name ATTRIBUTE_UNUSED,
6805 unsigned int flags ATTRIBUTE_UNUSED,
6806 tree decl ATTRIBUTE_UNUSED)
6807 {
6808 /* Some object formats don't support named sections at all. The
6809 front-end should already have flagged this as an error. */
6810 gcc_unreachable ();
6811 }
6812
6813 #ifndef TLS_SECTION_ASM_FLAG
6814 #define TLS_SECTION_ASM_FLAG 'T'
6815 #endif
6816
6817 void
6818 default_elf_asm_named_section (const char *name, unsigned int flags,
6819 tree decl)
6820 {
6821 char flagchars[11], *f = flagchars;
6822 unsigned int numeric_value = 0;
6823
6824 /* If we have already declared this section, we can use an
6825 abbreviated form to switch back to it -- unless this section is
6826 part of a COMDAT group or uses SHF_GNU_RETAIN or SHF_LINK_ORDER,
6827 in which case GAS requires the full declaration every time. */
6828 if (!(HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
6829 && !(flags & (SECTION_RETAIN | SECTION_LINK_ORDER))
6830 && (flags & SECTION_DECLARED))
6831 {
6832 fprintf (asm_out_file, "\t.section\t%s\n", name);
6833 return;
6834 }
6835
6836 /* If we have a machine specific flag, then use the numeric value to pass
6837 this on to GAS. */
6838 if (targetm.asm_out.elf_flags_numeric (flags, &numeric_value))
6839 snprintf (f, sizeof (flagchars), "0x%08x", numeric_value);
6840 else
6841 {
6842 if (!(flags & SECTION_DEBUG))
6843 *f++ = 'a';
6844 #if HAVE_GAS_SECTION_EXCLUDE
6845 if (flags & SECTION_EXCLUDE)
6846 *f++ = 'e';
6847 #endif
6848 if (flags & SECTION_WRITE)
6849 *f++ = 'w';
6850 if (flags & SECTION_CODE)
6851 *f++ = 'x';
6852 if (flags & SECTION_SMALL)
6853 *f++ = 's';
6854 if (flags & SECTION_MERGE)
6855 *f++ = 'M';
6856 if (flags & SECTION_STRINGS)
6857 *f++ = 'S';
6858 if (flags & SECTION_TLS)
6859 *f++ = TLS_SECTION_ASM_FLAG;
6860 if (HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
6861 *f++ = 'G';
6862 if (flags & SECTION_RETAIN)
6863 *f++ = 'R';
6864 if (flags & SECTION_LINK_ORDER)
6865 *f++ = 'o';
6866 #ifdef MACH_DEP_SECTION_ASM_FLAG
6867 if (flags & SECTION_MACH_DEP)
6868 *f++ = MACH_DEP_SECTION_ASM_FLAG;
6869 #endif
6870 *f = '\0';
6871 }
6872
6873 fprintf (asm_out_file, "\t.section\t%s,\"%s\"", name, flagchars);
6874
6875 /* default_section_type_flags (above) knows which flags need special
6876 handling here, and sets NOTYPE when none of these apply so that the
6877 assembler's logic for default types can apply to user-chosen
6878 section names. */
6879 if (!(flags & SECTION_NOTYPE))
6880 {
6881 const char *type;
6882 const char *format;
6883
6884 if (flags & SECTION_BSS)
6885 type = "nobits";
6886 else
6887 type = "progbits";
6888
6889 format = ",@%s";
6890 /* On platforms that use "@" as the assembly comment character,
6891 use "%" instead. */
6892 if (strcmp (ASM_COMMENT_START, "@") == 0)
6893 format = ",%%%s";
6894 fprintf (asm_out_file, format, type);
6895
6896 if (flags & SECTION_ENTSIZE)
6897 fprintf (asm_out_file, ",%d", flags & SECTION_ENTSIZE);
6898 if (flags & SECTION_LINK_ORDER)
6899 {
6900 tree id = DECL_ASSEMBLER_NAME (decl);
6901 ultimate_transparent_alias_target (&id);
6902 const char *name = IDENTIFIER_POINTER (id);
6903 name = targetm.strip_name_encoding (name);
6904 fprintf (asm_out_file, ",%s", name);
6905 }
6906 if (HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
6907 {
6908 if (TREE_CODE (decl) == IDENTIFIER_NODE)
6909 fprintf (asm_out_file, ",%s,comdat", IDENTIFIER_POINTER (decl));
6910 else
6911 fprintf (asm_out_file, ",%s,comdat",
6912 IDENTIFIER_POINTER (DECL_COMDAT_GROUP (decl)));
6913 }
6914 }
6915
6916 putc ('\n', asm_out_file);
6917 }
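
/* Editorial illustration (hypothetical symbol names, not part of varasm.c):
   for a COMDAT function 'foo' compiled with -ffunction-sections on a
   GAS/ELF target, the code above prints a full declaration along the lines
   of

       .section  .text.foo,"axG",@progbits,foo,comdat

   ('a' alloc, 'x' executable, 'G' COMDAT group), whereas switching back to
   an already-declared plain section uses just ".section .text.foo". */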
6918
6919 void
6920 default_coff_asm_named_section (const char *name, unsigned int flags,
6921 tree decl ATTRIBUTE_UNUSED)
6922 {
6923 char flagchars[8], *f = flagchars;
6924
6925 if (flags & SECTION_WRITE)
6926 *f++ = 'w';
6927 if (flags & SECTION_CODE)
6928 *f++ = 'x';
6929 *f = '\0';
6930
6931 fprintf (asm_out_file, "\t.section\t%s,\"%s\"\n", name, flagchars);
6932 }
6933
6934 void
6935 default_pe_asm_named_section (const char *name, unsigned int flags,
6936 tree decl)
6937 {
6938 default_coff_asm_named_section (name, flags, decl);
6939
6940 if (flags & SECTION_LINKONCE)
6941 {
6942 /* Functions may have been compiled at various levels of
6943 optimization so we can't use `same_size' here.
6944 Instead, have the linker pick one. */
6945 fprintf (asm_out_file, "\t.linkonce %s\n",
6946 (flags & SECTION_CODE ? "discard" : "same_size"));
6947 }
6948 }
6949 \f
6950 /* The lame default section selector. */
6951
6952 section *
6953 default_select_section (tree decl, int reloc,
6954 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
6955 {
6956 if (DECL_P (decl))
6957 {
6958 if (decl_readonly_section (decl, reloc))
6959 return readonly_data_section;
6960 }
6961 else if (TREE_CODE (decl) == CONSTRUCTOR)
6962 {
6963 if (! ((flag_pic && reloc)
6964 || !TREE_READONLY (decl)
6965 || TREE_SIDE_EFFECTS (decl)
6966 || !TREE_CONSTANT (decl)))
6967 return readonly_data_section;
6968 }
6969 else if (TREE_CODE (decl) == STRING_CST)
6970 return readonly_data_section;
6971 else if (! (flag_pic && reloc))
6972 return readonly_data_section;
6973
6974 return data_section;
6975 }
6976
6977 enum section_category
6978 categorize_decl_for_section (const_tree decl, int reloc)
6979 {
6980 enum section_category ret;
6981
6982 if (TREE_CODE (decl) == FUNCTION_DECL)
6983 return SECCAT_TEXT;
6984 else if (TREE_CODE (decl) == STRING_CST)
6985 {
6986 if ((flag_sanitize & SANITIZE_ADDRESS)
6987 && asan_protect_global (CONST_CAST_TREE (decl)))
6988 /* or !flag_merge_constants */
6989 return SECCAT_RODATA;
6990 else
6991 return SECCAT_RODATA_MERGE_STR;
6992 }
6993 else if (VAR_P (decl))
6994 {
6995 tree d = CONST_CAST_TREE (decl);
6996 if (bss_initializer_p (decl))
6997 ret = SECCAT_BSS;
6998 else if (! TREE_READONLY (decl)
6999 || TREE_SIDE_EFFECTS (decl)
7000 || (DECL_INITIAL (decl)
7001 && ! TREE_CONSTANT (DECL_INITIAL (decl))))
7002 {
7003 /* Here the reloc_rw_mask is not testing whether the section should
7004 be read-only or not, but whether the dynamic linker will have to
7005 do something. If so, we wish to segregate the data in order to
7006 minimize cache misses inside the dynamic linker. */
7007 if (reloc & targetm.asm_out.reloc_rw_mask ())
7008 ret = reloc == 1 ? SECCAT_DATA_REL_LOCAL : SECCAT_DATA_REL;
7009 else
7010 ret = SECCAT_DATA;
7011 }
7012 else if (reloc & targetm.asm_out.reloc_rw_mask ())
7013 ret = reloc == 1 ? SECCAT_DATA_REL_RO_LOCAL : SECCAT_DATA_REL_RO;
7014 else if (reloc || flag_merge_constants < 2
7015 || ((flag_sanitize & SANITIZE_ADDRESS)
7016 /* PR 81697: for architectures that use section anchors we
7017 need to ignore DECL_RTL_SET_P (decl) for string constants
7018 inside this asan_protect_global call because otherwise
7019 we'll wrongly put them into SECCAT_RODATA_MERGE_CONST
7020 section, set DECL_RTL (decl) later on and add DECL to
7021 protected globals via successive asan_protect_global
7022 calls. In this scenario we'll end up with wrong
7023 alignment of these strings at runtime and possible ASan
7024 false positives. */
7025 && asan_protect_global (d, use_object_blocks_p ()
7026 && use_blocks_for_decl_p (d))))
7027 /* C and C++ don't allow different variables to share the same
7028 location. -fmerge-all-constants allows even that (at the
7029 expense of not conforming). */
7030 ret = SECCAT_RODATA;
7031 else if (DECL_INITIAL (decl)
7032 && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST)
7033 ret = SECCAT_RODATA_MERGE_STR_INIT;
7034 else
7035 ret = SECCAT_RODATA_MERGE_CONST;
7036 }
7037 else if (TREE_CODE (decl) == CONSTRUCTOR)
7038 {
7039 if ((reloc & targetm.asm_out.reloc_rw_mask ())
7040 || TREE_SIDE_EFFECTS (decl)
7041 || ! TREE_CONSTANT (decl))
7042 ret = SECCAT_DATA;
7043 else
7044 ret = SECCAT_RODATA;
7045 }
7046 else
7047 ret = SECCAT_RODATA;
7048
7049 /* There are no read-only thread-local sections. */
7050 if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
7051 {
7052 /* Note that this would be *just* SECCAT_BSS, except that there's
7053 no concept of a read-only thread-local-data section. */
7054 if (ret == SECCAT_BSS
7055 || DECL_INITIAL (decl) == NULL
7056 || (flag_zero_initialized_in_bss
7057 && initializer_zerop (DECL_INITIAL (decl))))
7058 ret = SECCAT_TBSS;
7059 else
7060 ret = SECCAT_TDATA;
7061 }
7062
7063 /* If the target uses small data sections, select it. */
7064 else if (targetm.in_small_data_p (decl))
7065 {
7066 if (ret == SECCAT_BSS)
7067 ret = SECCAT_SBSS;
7068 else if (targetm.have_srodata_section && ret == SECCAT_RODATA)
7069 ret = SECCAT_SRODATA;
7070 else
7071 ret = SECCAT_SDATA;
7072 }
7073
7074 return ret;
7075 }
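
/* Editorial illustration (hypothetical declarations, not part of varasm.c):
   with default flags (-fno-common) on a typical ELF target the
   categorization above gives roughly

       int counter;                    -> SECCAT_BSS    (.bss)
       int seeded = 42;                -> SECCAT_DATA   (.data)
       const int limit = 10;           -> SECCAT_RODATA (.rodata)
       void (*const hook) (void) = fn; -> SECCAT_RODATA without -fpic,
                                          SECCAT_DATA_REL_RO with -fpic

   where 'fn' is some extern function. */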
7076
7077 static bool
7078 decl_readonly_section_1 (enum section_category category)
7079 {
7080 switch (category)
7081 {
7082 case SECCAT_RODATA:
7083 case SECCAT_RODATA_MERGE_STR:
7084 case SECCAT_RODATA_MERGE_STR_INIT:
7085 case SECCAT_RODATA_MERGE_CONST:
7086 case SECCAT_SRODATA:
7087 return true;
7088 default:
7089 return false;
7090 }
7091 }
7092
7093 bool
7094 decl_readonly_section (const_tree decl, int reloc)
7095 {
7096 return decl_readonly_section_1 (categorize_decl_for_section (decl, reloc));
7097 }
7098
7099 /* Select a section based on the above categorization. */
7100
7101 section *
7102 default_elf_select_section (tree decl, int reloc,
7103 unsigned HOST_WIDE_INT align)
7104 {
7105 const char *sname;
7106
7107 switch (categorize_decl_for_section (decl, reloc))
7108 {
7109 case SECCAT_TEXT:
7110 /* We're not supposed to be called on FUNCTION_DECLs. */
7111 gcc_unreachable ();
7112 case SECCAT_RODATA:
7113 return readonly_data_section;
7114 case SECCAT_RODATA_MERGE_STR:
7115 return mergeable_string_section (decl, align, 0);
7116 case SECCAT_RODATA_MERGE_STR_INIT:
7117 return mergeable_string_section (DECL_INITIAL (decl), align, 0);
7118 case SECCAT_RODATA_MERGE_CONST:
7119 return mergeable_constant_section (DECL_MODE (decl), align, 0);
7120 case SECCAT_SRODATA:
7121 sname = ".sdata2";
7122 break;
7123 case SECCAT_DATA:
7124 if (DECL_P (decl) && DECL_PERSISTENT_P (decl))
7125 {
7126 sname = ".persistent";
7127 break;
7128 }
7129 return data_section;
7130 case SECCAT_DATA_REL:
7131 sname = ".data.rel";
7132 break;
7133 case SECCAT_DATA_REL_LOCAL:
7134 sname = ".data.rel.local";
7135 break;
7136 case SECCAT_DATA_REL_RO:
7137 sname = ".data.rel.ro";
7138 break;
7139 case SECCAT_DATA_REL_RO_LOCAL:
7140 sname = ".data.rel.ro.local";
7141 break;
7142 case SECCAT_SDATA:
7143 sname = ".sdata";
7144 break;
7145 case SECCAT_TDATA:
7146 sname = ".tdata";
7147 break;
7148 case SECCAT_BSS:
7149 if (DECL_P (decl) && DECL_NOINIT_P (decl))
7150 {
7151 sname = ".noinit";
7152 break;
7153 }
7154 if (bss_section)
7155 return bss_section;
7156 sname = ".bss";
7157 break;
7158 case SECCAT_SBSS:
7159 sname = ".sbss";
7160 break;
7161 case SECCAT_TBSS:
7162 sname = ".tbss";
7163 break;
7164 default:
7165 gcc_unreachable ();
7166 }
7167
7168 return get_named_section (decl, sname, reloc);
7169 }
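
/* A few illustrative outcomes of the switch above (the exact category also
   depends on compilation flags and target hooks): a const-qualified variable
   with a constant initializer (SECCAT_RODATA) goes to readonly_data_section
   (.rodata on typical ELF targets), an uninitialized global (SECCAT_BSS) to
   .bss, an initialized thread-local variable (SECCAT_TDATA) to .tdata, and a
   writable variable whose initializer needs runtime relocations under -fpic
   (one of the SECCAT_DATA_REL* categories) to the corresponding .data.rel*
   section.  */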
7170
7171 /* Construct a unique section name based on the decl name and the
7172 categorization performed above. */
7173
7174 void
7175 default_unique_section (tree decl, int reloc)
7176 {
7177 /* We only need to use .gnu.linkonce if we don't have COMDAT groups. */
7178 bool one_only = DECL_ONE_ONLY (decl) && !HAVE_COMDAT_GROUP;
7179 const char *prefix, *name, *linkonce;
7180 char *string;
7181 tree id;
7182
7183 switch (categorize_decl_for_section (decl, reloc))
7184 {
7185 case SECCAT_TEXT:
7186 prefix = one_only ? ".t" : ".text";
7187 break;
7188 case SECCAT_RODATA:
7189 case SECCAT_RODATA_MERGE_STR:
7190 case SECCAT_RODATA_MERGE_STR_INIT:
7191 case SECCAT_RODATA_MERGE_CONST:
7192 prefix = one_only ? ".r" : ".rodata";
7193 break;
7194 case SECCAT_SRODATA:
7195 prefix = one_only ? ".s2" : ".sdata2";
7196 break;
7197 case SECCAT_DATA:
7198 prefix = one_only ? ".d" : ".data";
7199 if (DECL_P (decl) && DECL_PERSISTENT_P (decl))
7200 {
7201 prefix = one_only ? ".p" : ".persistent";
7202 break;
7203 }
7204 break;
7205 case SECCAT_DATA_REL:
7206 prefix = one_only ? ".d.rel" : ".data.rel";
7207 break;
7208 case SECCAT_DATA_REL_LOCAL:
7209 prefix = one_only ? ".d.rel.local" : ".data.rel.local";
7210 break;
7211 case SECCAT_DATA_REL_RO:
7212 prefix = one_only ? ".d.rel.ro" : ".data.rel.ro";
7213 break;
7214 case SECCAT_DATA_REL_RO_LOCAL:
7215 prefix = one_only ? ".d.rel.ro.local" : ".data.rel.ro.local";
7216 break;
7217 case SECCAT_SDATA:
7218 prefix = one_only ? ".s" : ".sdata";
7219 break;
7220 case SECCAT_BSS:
7221 if (DECL_P (decl) && DECL_NOINIT_P (decl))
7222 {
7223 prefix = one_only ? ".n" : ".noinit";
7224 break;
7225 }
7226 prefix = one_only ? ".b" : ".bss";
7227 break;
7228 case SECCAT_SBSS:
7229 prefix = one_only ? ".sb" : ".sbss";
7230 break;
7231 case SECCAT_TDATA:
7232 prefix = one_only ? ".td" : ".tdata";
7233 break;
7234 case SECCAT_TBSS:
7235 prefix = one_only ? ".tb" : ".tbss";
7236 break;
7237 default:
7238 gcc_unreachable ();
7239 }
7240
7241 id = DECL_ASSEMBLER_NAME (decl);
7242 ultimate_transparent_alias_target (&id);
7243 name = IDENTIFIER_POINTER (id);
7244 name = targetm.strip_name_encoding (name);
7245
7246 /* If we're using one_only, then there needs to be a .gnu.linkonce
7247 prefix to the section name. */
7248 linkonce = one_only ? ".gnu.linkonce" : "";
7249
7250 string = ACONCAT ((linkonce, prefix, ".", name, NULL));
7251
7252 set_decl_section_name (decl, string);
7253 }
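
/* For example (illustrative, common ELF case): a global "int counter = 1;"
   gets the unique section name ".data.counter", while the same variable with
   DECL_ONE_ONLY on a target without COMDAT groups gets
   ".gnu.linkonce.d.counter".  The final component is the assembler name
   after targetm.strip_name_encoding.  */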
7254
7255 /* Subroutine of compute_reloc_for_rtx for leaf rtxes. */
7256
7257 static int
7258 compute_reloc_for_rtx_1 (const_rtx x)
7259 {
7260 switch (GET_CODE (x))
7261 {
7262 case SYMBOL_REF:
7263 return SYMBOL_REF_LOCAL_P (x) ? 1 : 2;
7264 case LABEL_REF:
7265 return 1;
7266 default:
7267 return 0;
7268 }
7269 }
7270
7271 /* Like compute_reloc_for_constant, except for an RTX. The return value
7272 is a mask for which bit 1 indicates a global relocation, and bit 0
7273 indicates a local relocation. Used by default_select_rtx_section
7274 and default_elf_select_rtx_section. */
7275
7276 static int
7277 compute_reloc_for_rtx (const_rtx x)
7278 {
7279 switch (GET_CODE (x))
7280 {
7281 case SYMBOL_REF:
7282 case LABEL_REF:
7283 return compute_reloc_for_rtx_1 (x);
7284
7285 case CONST:
7286 {
7287 int reloc = 0;
7288 subrtx_iterator::array_type array;
7289 FOR_EACH_SUBRTX (iter, array, x, ALL)
7290 reloc |= compute_reloc_for_rtx_1 (*iter);
7291 return reloc;
7292 }
7293
7294 default:
7295 return 0;
7296 }
7297 }
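
/* For example, a SYMBOL_REF that binds locally yields 1, a preemptible
   (global) SYMBOL_REF yields 2, and a CONST expression referencing both
   kinds of symbol yields 3, since the per-symbol results are OR-ed
   together.  */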
7298
7299 section *
7300 default_select_rtx_section (machine_mode mode ATTRIBUTE_UNUSED,
7301 rtx x,
7302 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
7303 {
7304 if (compute_reloc_for_rtx (x) & targetm.asm_out.reloc_rw_mask ())
7305 return data_section;
7306 else
7307 return readonly_data_section;
7308 }
7309
7310 section *
7311 default_elf_select_rtx_section (machine_mode mode, rtx x,
7312 unsigned HOST_WIDE_INT align)
7313 {
7314 int reloc = compute_reloc_for_rtx (x);
7315
7316 /* ??? Handle small data here somehow. */
7317
7318 if (reloc & targetm.asm_out.reloc_rw_mask ())
7319 {
7320 if (reloc == 1)
7321 return get_named_section (NULL, ".data.rel.ro.local", 1);
7322 else
7323 return get_named_section (NULL, ".data.rel.ro", 3);
7324 }
7325
7326 return mergeable_constant_section (mode, align, 0);
7327 }
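
/* Illustrative example: when targetm.asm_out.reloc_rw_mask () is nonzero (as
   it typically is for -fpic), a constant-pool entry holding the address of a
   locally-binding symbol (reloc == 1) is placed in .data.rel.ro.local and
   one holding a preemptible symbol's address in .data.rel.ro, while a plain
   numeric constant falls through to a mergeable constant section.  */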
7328
7329 /* Set the generally applicable flags on the SYMBOL_REF for DECL. */
7330
7331 void
7332 default_encode_section_info (tree decl, rtx rtl, int first ATTRIBUTE_UNUSED)
7333 {
7334 rtx symbol;
7335 int flags;
7336
7337 /* Careful not to prod global register variables. */
7338 if (!MEM_P (rtl))
7339 return;
7340 symbol = XEXP (rtl, 0);
7341 if (GET_CODE (symbol) != SYMBOL_REF)
7342 return;
7343
7344 flags = SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_HAS_BLOCK_INFO;
7345 if (TREE_CODE (decl) == FUNCTION_DECL)
7346 flags |= SYMBOL_FLAG_FUNCTION;
7347 if (targetm.binds_local_p (decl))
7348 flags |= SYMBOL_FLAG_LOCAL;
7349 if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
7350 flags |= DECL_TLS_MODEL (decl) << SYMBOL_FLAG_TLS_SHIFT;
7351 else if (targetm.in_small_data_p (decl))
7352 flags |= SYMBOL_FLAG_SMALL;
7353 /* ??? Why is DECL_EXTERNAL ever set for non-PUBLIC names? Without
7354 being PUBLIC, the thing *must* be defined in this translation unit.
7355 Prevent this buglet from being propagated into rtl code as well. */
7356 if (DECL_P (decl) && DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
7357 flags |= SYMBOL_FLAG_EXTERNAL;
7358
7359 SYMBOL_REF_FLAGS (symbol) = flags;
7360 }
7361
7362 /* By default, we do nothing for encode_section_info, so we need not
7363 do anything but discard the '*' marker. */
7364
7365 const char *
7366 default_strip_name_encoding (const char *str)
7367 {
7368 return str + (*str == '*');
7369 }
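
/* For example, "*_foo" (where the leading '*' marks a name to be emitted
   verbatim) becomes "_foo", while "bar" is returned unchanged.  */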
7370
7371 #ifdef ASM_OUTPUT_DEF
7372 /* The default implementation of TARGET_ASM_OUTPUT_ANCHOR. Define the
7373 anchor relative to ".", the current section position. */
7374
7375 void
7376 default_asm_output_anchor (rtx symbol)
7377 {
7378 char buffer[100];
7379
7380 sprintf (buffer, "*. + " HOST_WIDE_INT_PRINT_DEC,
7381 SYMBOL_REF_BLOCK_OFFSET (symbol));
7382 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
7383 }
7384 #endif
7385
7386 /* The default implementation of TARGET_USE_ANCHORS_FOR_SYMBOL_P. */
7387
7388 bool
7389 default_use_anchors_for_symbol_p (const_rtx symbol)
7390 {
7391 tree decl;
7392 section *sect = SYMBOL_REF_BLOCK (symbol)->sect;
7393
7394 /* This function should only be called with non-zero SYMBOL_REF_BLOCK,
7395 furthermore get_block_for_section should not create object blocks
7396 for mergeable sections. */
7397 gcc_checking_assert (sect && !(sect->common.flags & SECTION_MERGE));
7398
7399 /* Don't use anchors for small data sections. The small data register
7400 acts as an anchor for such sections. */
7401 if (sect->common.flags & SECTION_SMALL)
7402 return false;
7403
7404 decl = SYMBOL_REF_DECL (symbol);
7405 if (decl && DECL_P (decl))
7406 {
7407 /* Don't use section anchors for decls that might be defined or
7408 usurped by other modules. */
7409 if (TREE_PUBLIC (decl) && !decl_binds_to_current_def_p (decl))
7410 return false;
7411
7412 /* Don't use section anchors for decls that will be placed in a
7413 small data section. */
7414 /* ??? Ideally, this check would be redundant with the SECTION_SMALL
7415 one above. The problem is that we only use SECTION_SMALL for
7416 sections that should be marked as small in the section directive. */
7417 if (targetm.in_small_data_p (decl))
7418 return false;
7419
7420 /* Don't use section anchors for decls that won't fit inside a single
7421 anchor range to reduce the number of instructions required to refer
7422 to the entire declaration. */
7423 if (DECL_SIZE_UNIT (decl) == NULL_TREE
7424 || !tree_fits_uhwi_p (DECL_SIZE_UNIT (decl))
7425 || (tree_to_uhwi (DECL_SIZE_UNIT (decl))
7426 >= (unsigned HOST_WIDE_INT) targetm.max_anchor_offset))
7427 return false;
7428
7429 }
7430 return true;
7431 }
7432
7433 /* Return true when RESOLUTION indicates that the symbol will be bound
7434 to the definition provided by the current .o file. */
7435
7436 static bool
7437 resolution_to_local_definition_p (enum ld_plugin_symbol_resolution resolution)
7438 {
7439 return (resolution == LDPR_PREVAILING_DEF
7440 || resolution == LDPR_PREVAILING_DEF_IRONLY_EXP
7441 || resolution == LDPR_PREVAILING_DEF_IRONLY);
7442 }
7443
7444 /* Return true when RESOLUTION indicates that the symbol will be bound
7445 locally within the current executable or DSO. */
7446
7447 static bool
7448 resolution_local_p (enum ld_plugin_symbol_resolution resolution)
7449 {
7450 return (resolution == LDPR_PREVAILING_DEF
7451 || resolution == LDPR_PREVAILING_DEF_IRONLY
7452 || resolution == LDPR_PREVAILING_DEF_IRONLY_EXP
7453 || resolution == LDPR_PREEMPTED_REG
7454 || resolution == LDPR_PREEMPTED_IR
7455 || resolution == LDPR_RESOLVED_IR
7456 || resolution == LDPR_RESOLVED_EXEC);
7457 }
7458
7459 /* If COMMON_LOCAL_P is true, the linker can guarantee that an
7460 uninitialized common symbol in the executable will still be defined
7461 (through COPY relocation) in the executable. */
7462
7463 bool
7464 default_binds_local_p_3 (const_tree exp, bool shlib, bool weak_dominate,
7465 bool extern_protected_data, bool common_local_p)
7466 {
7467 /* A non-decl is an entry in the constant pool. */
7468 if (!DECL_P (exp))
7469 return true;
7470
7471 /* Weakrefs may not bind locally, even though the weakref itself is always
7472 static and therefore local. Similarly, the resolver for ifunc functions
7473 might resolve to a non-local function.
7474 FIXME: We can resolve the weakref case more carefully by looking at the
7475 weakref alias. */
7476 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (exp))
7477 || (TREE_CODE (exp) == FUNCTION_DECL
7478 && cgraph_node::get (exp)
7479 && cgraph_node::get (exp)->ifunc_resolver))
7480 return false;
7481
7482 /* Static variables are always local. */
7483 if (! TREE_PUBLIC (exp))
7484 return true;
7485
7486 /* With a resolution file in hand, take a look at the resolutions.
7487 We can't just return true for resolved_locally symbols,
7488 because dynamic linking might overwrite symbols
7489 in shared libraries. */
7490 bool resolved_locally = false;
7491
7492 bool uninited_common = (DECL_COMMON (exp)
7493 && (DECL_INITIAL (exp) == NULL
7494 || (!in_lto_p
7495 && DECL_INITIAL (exp) == error_mark_node)));
7496
7497 /* A non-external variable is defined locally only if it isn't an
7498 uninitialized COMMON variable or common_local_p is true. */
7499 bool defined_locally = (!DECL_EXTERNAL (exp)
7500 && (!uninited_common || common_local_p));
7501 if (symtab_node *node = symtab_node::get (exp))
7502 {
7503 if (node->in_other_partition)
7504 defined_locally = true;
7505 if (node->can_be_discarded_p ())
7506 ;
7507 else if (resolution_to_local_definition_p (node->resolution))
7508 defined_locally = resolved_locally = true;
7509 else if (resolution_local_p (node->resolution))
7510 resolved_locally = true;
7511 }
7512 if (defined_locally && weak_dominate && !shlib)
7513 resolved_locally = true;
7514
7515 /* Undefined weak symbols are never defined locally. */
7516 if (DECL_WEAK (exp) && !defined_locally)
7517 return false;
7518
7519 /* A symbol is local if the user has said explicitly that it will be,
7520 or if we have a definition for the symbol. We cannot infer visibility
7521 for undefined symbols. */
7522 if (DECL_VISIBILITY (exp) != VISIBILITY_DEFAULT
7523 && (TREE_CODE (exp) == FUNCTION_DECL
7524 || !extern_protected_data
7525 || DECL_VISIBILITY (exp) != VISIBILITY_PROTECTED)
7526 && (DECL_VISIBILITY_SPECIFIED (exp) || defined_locally))
7527 return true;
7528
7529 /* If PIC, then assume that any global name can be overridden by
7530 symbols resolved from other modules. */
7531 if (shlib)
7532 return false;
7533
7534 /* Variables defined outside this object might not be local. */
7535 if (DECL_EXTERNAL (exp) && !resolved_locally)
7536 return false;
7537
7538 /* Non-dominant weak symbols are not defined locally. */
7539 if (DECL_WEAK (exp) && !resolved_locally)
7540 return false;
7541
7542 /* Uninitialized COMMON variable may be unified with symbols
7543 resolved from other modules. */
7544 if (uninited_common && !resolved_locally)
7545 return false;
7546
7547 /* Otherwise we're left with initialized (or non-common) global data
7548 which is of necessity defined locally. */
7549 return true;
7550 }
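
/* Some representative outcomes (illustrative): a !TREE_PUBLIC static always
   binds locally; an undefined weak symbol never does; and when SHLIB is set
   (PIC code destined for a shared library), any TREE_PUBLIC symbol with
   default visibility is assumed to be preemptible and therefore
   non-local.  */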
7551
7552 /* Assume ELF-ish defaults, since that's pretty much the most liberal
7553 wrt cross-module name binding. */
7554
7555 bool
7556 default_binds_local_p (const_tree exp)
7557 {
7558 return default_binds_local_p_3 (exp, flag_shlib != 0, true, false, false);
7559 }
7560
7561 /* Similar to default_binds_local_p, but common symbol may be local and
7562 extern protected data is non-local. */
7563
7564 bool
7565 default_binds_local_p_2 (const_tree exp)
7566 {
7567 return default_binds_local_p_3 (exp, flag_shlib != 0, true, true,
7568 !flag_pic);
7569 }
7570
7571 bool
7572 default_binds_local_p_1 (const_tree exp, int shlib)
7573 {
7574 return default_binds_local_p_3 (exp, shlib != 0, false, false, false);
7575 }
7576
7577 /* Return true when references to DECL must bind to the current definition
7578 in the final executable.
7579
7580 The condition is usually equivalent to whether the function binds to the
7581 current module (shared library or executable), that is, to binds_local_p.
7582 We use this fact to avoid the need for another target hook and implement
7583 the logic using binds_local_p, special-casing only the situations where
7584 decl_binds_to_current_def_p is stronger than binds_local_p: in particular
7585 weak definitions (which can be overwritten at link time by another
7586 definition from a different object file); also, when resolution info is
7587 available, we simply use the knowledge passed to us by the linker plugin. */
7588 bool
7589 decl_binds_to_current_def_p (const_tree decl)
7590 {
7591 gcc_assert (DECL_P (decl));
7592 if (!targetm.binds_local_p (decl))
7593 return false;
7594 if (!TREE_PUBLIC (decl))
7595 return true;
7596
7597 /* When resolution is available, just use it. */
7598 if (symtab_node *node = symtab_node::get (decl))
7599 {
7600 if (node->resolution != LDPR_UNKNOWN
7601 && !node->can_be_discarded_p ())
7602 return resolution_to_local_definition_p (node->resolution);
7603 }
7604
7605 /* Otherwise we have to assume the worst for DECL_WEAK (hidden weaks
7606 bind locally but can still be overwritten), DECL_COMMON (can be merged
7607 with a non-common definition somewhere in the same module) or
7608 DECL_EXTERNAL.
7609 This relies on the fact that binds_local_p behaves as decl_replaceable_p
7610 for all other declaration types. */
7611 if (DECL_WEAK (decl))
7612 return false;
7613 if (DECL_COMMON (decl)
7614 && (DECL_INITIAL (decl) == NULL
7615 || (!in_lto_p && DECL_INITIAL (decl) == error_mark_node)))
7616 return false;
7617 if (DECL_EXTERNAL (decl))
7618 return false;
7619 return true;
7620 }
7621
7622 /* A replaceable function or variable is one which may be replaced
7623 at link-time with an entirely different definition, provided that the
7624 replacement has the same type. For example, functions declared
7625 with __attribute__((weak)) on most systems are replaceable.
7626 If SEMANTIC_INTERPOSITION_P is false allow interposition only on
7627 symbols explicitly declared weak.
7628
7629 COMDAT functions are not replaceable, since all definitions of the
7630 function must be equivalent. It is important that COMDAT functions
7631 not be treated as replaceable so that use of C++ template
7632 instantiations is not penalized. */
7633
7634 bool
7635 decl_replaceable_p (tree decl, bool semantic_interposition_p)
7636 {
7637 gcc_assert (DECL_P (decl));
7638 if (!TREE_PUBLIC (decl) || DECL_COMDAT (decl))
7639 return false;
7640 if (!semantic_interposition_p
7641 && !DECL_WEAK (decl))
7642 return false;
7643 return !decl_binds_to_current_def_p (decl);
7644 }
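
/* For example (illustrative): a COMDAT template instantiation is never
   replaceable; a plain global function linked into an executable binds to
   its own definition and so is not replaceable either; the same function
   compiled with -fpic for a shared library is replaceable, unless
   SEMANTIC_INTERPOSITION_P is false and the function is not declared
   weak.  */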
7645
7646 /* Default function to output code that will globalize a label. A
7647 target must define GLOBAL_ASM_OP or provide its own function to
7648 globalize a label. */
7649 #ifdef GLOBAL_ASM_OP
7650 void
7651 default_globalize_label (FILE * stream, const char *name)
7652 {
7653 fputs (GLOBAL_ASM_OP, stream);
7654 assemble_name (stream, name);
7655 putc ('\n', stream);
7656 }
7657 #endif /* GLOBAL_ASM_OP */
7658
7659 /* Default function to output code that will globalize a declaration. */
7660 void
7661 default_globalize_decl_name (FILE * stream, tree decl)
7662 {
7663 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
7664 targetm.asm_out.globalize_label (stream, name);
7665 }
7666
7667 /* Default function to output a label for unwind information. The
7668 default is to do nothing. A target that needs nonlocal labels for
7669 unwind information must provide its own function to do this. */
7670 void
7671 default_emit_unwind_label (FILE * stream ATTRIBUTE_UNUSED,
7672 tree decl ATTRIBUTE_UNUSED,
7673 int for_eh ATTRIBUTE_UNUSED,
7674 int empty ATTRIBUTE_UNUSED)
7675 {
7676 }
7677
7678 /* Default function to output a label to divide up the exception table.
7679 The default is to do nothing. A target that needs/wants to divide
7680 up the table must provide its own function to do this. */
7681 void
7682 default_emit_except_table_label (FILE * stream ATTRIBUTE_UNUSED)
7683 {
7684 }
7685
7686 /* This is how to output an internal numbered label where PREFIX is
7687 the class of label and LABELNO is the number within the class. */
7688
7689 void
7690 default_generate_internal_label (char *buf, const char *prefix,
7691 unsigned long labelno)
7692 {
7693 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, labelno);
7694 }
7695
7696 /* This is how to output an internal numbered label where PREFIX is
7697 the class of label and LABELNO is the number within the class. */
7698
7699 void
7700 default_internal_label (FILE *stream, const char *prefix,
7701 unsigned long labelno)
7702 {
7703 char *const buf = (char *) alloca (40 + strlen (prefix));
7704 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, labelno);
7705 ASM_OUTPUT_INTERNAL_LABEL (stream, buf);
7706 }
7707
7708
7709 /* The default implementation of ASM_DECLARE_CONSTANT_NAME. */
7710
7711 void
7712 default_asm_declare_constant_name (FILE *file, const char *name,
7713 const_tree exp ATTRIBUTE_UNUSED,
7714 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
7715 {
7716 assemble_label (file, name);
7717 }
7718
7719 /* This is the default behavior at the beginning of a file. It's
7720 controlled by two other target-hook toggles. */
7721 void
7722 default_file_start (void)
7723 {
7724 if (targetm.asm_file_start_app_off
7725 && !(flag_verbose_asm || flag_debug_asm || flag_dump_rtl_in_asm))
7726 fputs (ASM_APP_OFF, asm_out_file);
7727
7728 if (targetm.asm_file_start_file_directive)
7729 {
7730 /* LTO-produced units have no meaningful main_input_filename. */
7731 if (in_lto_p)
7732 output_file_directive (asm_out_file, "<artificial>");
7733 else
7734 output_file_directive (asm_out_file, main_input_filename);
7735 }
7736 }
7737
7738 /* This is a generic routine suitable for use as TARGET_ASM_FILE_END
7739 which emits a special section directive used to indicate whether or
7740 not this object file needs an executable stack. This is primarily
7741 a GNU extension to ELF but could be used on other targets. */
7742
7743 int trampolines_created;
7744
7745 void
7746 file_end_indicate_exec_stack (void)
7747 {
7748 unsigned int flags = SECTION_DEBUG;
7749 if (trampolines_created)
7750 flags |= SECTION_CODE;
7751
7752 switch_to_section (get_section (".note.GNU-stack", flags, NULL));
7753 }
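
/* By the GNU convention this emits an empty .note.GNU-stack section; when it
   carries no executable (SECTION_CODE) flag, the linker takes it as a
   statement that this object does not require an executable stack, whereas
   marking it executable (as done above when trampolines were created) makes
   the linker request one.  */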
7754
7755 /* Emit a special section directive to indicate that this object file
7756 was compiled with -fsplit-stack. This is used to let the linker
7757 detect calls between split-stack code and non-split-stack code, so
7758 that it can modify the split-stack code to allocate a sufficiently
7759 large stack. We emit another special section if there are any
7760 functions in this file which have the no_split_stack attribute, to
7761 prevent the linker from warning about being unable to convert the
7762 functions if they call non-split-stack code. */
7763
7764 void
7765 file_end_indicate_split_stack (void)
7766 {
7767 if (flag_split_stack)
7768 {
7769 switch_to_section (get_section (".note.GNU-split-stack", SECTION_DEBUG,
7770 NULL));
7771 if (saw_no_split_stack)
7772 switch_to_section (get_section (".note.GNU-no-split-stack",
7773 SECTION_DEBUG, NULL));
7774 }
7775 }
7776
7777 /* Output DIRECTIVE (a C string) followed by a newline. This is used as
7778 a get_unnamed_section callback. */
7779
7780 void
7781 output_section_asm_op (const void *directive)
7782 {
7783 fprintf (asm_out_file, "%s\n", (const char *) directive);
7784 }
7785
7786 /* Emit assembly code to switch to section NEW_SECTION. Do nothing if
7787 the current section is NEW_SECTION. */
7788
7789 void
7790 switch_to_section (section *new_section, tree decl)
7791 {
7792 bool retain_p;
7793 if ((new_section->common.flags & SECTION_NAMED)
7794 && decl != nullptr
7795 && DECL_P (decl)
7796 && ((retain_p = !!lookup_attribute ("retain",
7797 DECL_ATTRIBUTES (decl)))
7798 != !!(new_section->common.flags & SECTION_RETAIN)))
7799 {
7800 /* If the SECTION_RETAIN bit doesn't match, switch to a new
7801 section. */
7802 tree used_decl, no_used_decl;
7803
7804 if (retain_p)
7805 {
7806 new_section->common.flags |= SECTION_RETAIN;
7807 used_decl = decl;
7808 no_used_decl = new_section->named.decl;
7809 }
7810 else
7811 {
7812 new_section->common.flags &= ~(SECTION_RETAIN
7813 | SECTION_DECLARED);
7814 used_decl = new_section->named.decl;
7815 no_used_decl = decl;
7816 }
7817 if (no_used_decl != used_decl)
7818 {
7819 warning (OPT_Wattributes,
7820 "%+qD without %<retain%> attribute and %qD with "
7821 "%<retain%> attribute are placed in a section with "
7822 "the same name", no_used_decl, used_decl);
7823 inform (DECL_SOURCE_LOCATION (used_decl),
7824 "%qD was declared here", used_decl);
7825 }
7826 }
7827 else if (in_section == new_section)
7828 return;
7829
7830 in_section = new_section;
7831
7832 switch (SECTION_STYLE (new_section))
7833 {
7834 case SECTION_NAMED:
7835 targetm.asm_out.named_section (new_section->named.name,
7836 new_section->named.common.flags,
7837 new_section->named.decl);
7838 break;
7839
7840 case SECTION_UNNAMED:
7841 new_section->unnamed.callback (new_section->unnamed.data);
7842 break;
7843
7844 case SECTION_NOSWITCH:
7845 gcc_unreachable ();
7846 break;
7847 }
7848
7849 new_section->common.flags |= SECTION_DECLARED;
7850 }
7851
7852 /* If block symbol SYMBOL has not yet been assigned an offset, place
7853 it at the end of its block. */
7854
7855 void
7856 place_block_symbol (rtx symbol)
7857 {
7858 unsigned HOST_WIDE_INT size, mask, offset;
7859 class constant_descriptor_rtx *desc;
7860 unsigned int alignment;
7861 struct object_block *block;
7862 tree decl;
7863
7864 gcc_assert (SYMBOL_REF_BLOCK (symbol));
7865 if (SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0)
7866 return;
7867
7868 /* Work out the symbol's size and alignment. */
7869 if (CONSTANT_POOL_ADDRESS_P (symbol))
7870 {
7871 desc = SYMBOL_REF_CONSTANT (symbol);
7872 alignment = desc->align;
7873 size = GET_MODE_SIZE (desc->mode);
7874 }
7875 else if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
7876 {
7877 decl = SYMBOL_REF_DECL (symbol);
7878 gcc_checking_assert (DECL_IN_CONSTANT_POOL (decl));
7879 alignment = DECL_ALIGN (decl);
7880 size = get_constant_size (DECL_INITIAL (decl));
7881 if ((flag_sanitize & SANITIZE_ADDRESS)
7882 && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST
7883 && asan_protect_global (DECL_INITIAL (decl)))
7884 {
7885 size += asan_red_zone_size (size);
7886 alignment = MAX (alignment,
7887 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
7888 }
7889 }
7890 else
7891 {
7892 struct symtab_node *snode;
7893 decl = SYMBOL_REF_DECL (symbol);
7894
7895 snode = symtab_node::get (decl);
7896 if (snode->alias)
7897 {
7898 rtx target = DECL_RTL (snode->ultimate_alias_target ()->decl);
7899
7900 gcc_assert (MEM_P (target)
7901 && GET_CODE (XEXP (target, 0)) == SYMBOL_REF
7902 && SYMBOL_REF_HAS_BLOCK_INFO_P (XEXP (target, 0)));
7903 target = XEXP (target, 0);
7904 place_block_symbol (target);
7905 SYMBOL_REF_BLOCK_OFFSET (symbol) = SYMBOL_REF_BLOCK_OFFSET (target);
7906 return;
7907 }
7908 alignment = get_variable_align (decl);
7909 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
7910 if ((flag_sanitize & SANITIZE_ADDRESS)
7911 && asan_protect_global (decl))
7912 {
7913 size += asan_red_zone_size (size);
7914 alignment = MAX (alignment,
7915 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
7916 }
7917 }
7918
7919 /* Calculate the object's offset from the start of the block. */
7920 block = SYMBOL_REF_BLOCK (symbol);
7921 mask = alignment / BITS_PER_UNIT - 1;
7922 offset = (block->size + mask) & ~mask;
7923 SYMBOL_REF_BLOCK_OFFSET (symbol) = offset;
7924
7925 /* Record the block's new alignment and size. */
7926 block->alignment = MAX (block->alignment, alignment);
7927 block->size = offset + size;
7928
7929 vec_safe_push (block->objects, symbol);
7930 }
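
/* A worked example: if BLOCK currently holds 13 bytes and SYMBOL's object
   requires 8-byte alignment, then mask == 7, the object is placed at offset
   (13 + 7) & ~7 == 16, and the block's size becomes 16 plus the object's
   size.  */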
7931
7932 /* Return the anchor that should be used to address byte offset OFFSET
7933 from the first object in BLOCK. MODEL is the TLS model used
7934 to access it. */
7935
7936 rtx
7937 get_section_anchor (struct object_block *block, HOST_WIDE_INT offset,
7938 enum tls_model model)
7939 {
7940 char label[100];
7941 unsigned int begin, middle, end;
7942 unsigned HOST_WIDE_INT min_offset, max_offset, range, bias, delta;
7943 rtx anchor;
7944
7945 /* Work out the anchor's offset. Use an offset of 0 for the first
7946 anchor so that we don't pessimize the case where we take the address
7947 of a variable at the beginning of the block. This is particularly
7948 useful when a block has only one variable assigned to it.
7949
7950 We try to place anchors RANGE bytes apart, so there can then be
7951 anchors at +/-RANGE, +/-2 * RANGE, and so on, up to the limits of
7952 a ptr_mode offset. With some target settings, the lowest such
7953 anchor might be out of range for the lowest ptr_mode offset;
7954 likewise the highest anchor for the highest offset. Use anchors
7955 at the extreme ends of the ptr_mode range in such cases.
7956
7957 All arithmetic uses unsigned integers in order to avoid
7958 signed overflow. */
7959 max_offset = (unsigned HOST_WIDE_INT) targetm.max_anchor_offset;
7960 min_offset = (unsigned HOST_WIDE_INT) targetm.min_anchor_offset;
7961 range = max_offset - min_offset + 1;
7962 if (range == 0)
7963 offset = 0;
7964 else
7965 {
7966 bias = HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (ptr_mode) - 1);
7967 if (offset < 0)
7968 {
7969 delta = -(unsigned HOST_WIDE_INT) offset + max_offset;
7970 delta -= delta % range;
7971 if (delta > bias)
7972 delta = bias;
7973 offset = (HOST_WIDE_INT) (-delta);
7974 }
7975 else
7976 {
7977 delta = (unsigned HOST_WIDE_INT) offset - min_offset;
7978 delta -= delta % range;
7979 if (delta > bias - 1)
7980 delta = bias - 1;
7981 offset = (HOST_WIDE_INT) delta;
7982 }
7983 }
7984
7985 /* Do a binary search to see if there's already an anchor we can use.
7986 Set BEGIN to the new anchor's index if not. */
7987 begin = 0;
7988 end = vec_safe_length (block->anchors);
7989 while (begin != end)
7990 {
7991 middle = (end + begin) / 2;
7992 anchor = (*block->anchors)[middle];
7993 if (SYMBOL_REF_BLOCK_OFFSET (anchor) > offset)
7994 end = middle;
7995 else if (SYMBOL_REF_BLOCK_OFFSET (anchor) < offset)
7996 begin = middle + 1;
7997 else if (SYMBOL_REF_TLS_MODEL (anchor) > model)
7998 end = middle;
7999 else if (SYMBOL_REF_TLS_MODEL (anchor) < model)
8000 begin = middle + 1;
8001 else
8002 return anchor;
8003 }
8004
8005 /* Create a new anchor with a unique label. */
8006 ASM_GENERATE_INTERNAL_LABEL (label, "LANCHOR", anchor_labelno++);
8007 anchor = create_block_symbol (ggc_strdup (label), block, offset);
8008 SYMBOL_REF_FLAGS (anchor) |= SYMBOL_FLAG_LOCAL | SYMBOL_FLAG_ANCHOR;
8009 SYMBOL_REF_FLAGS (anchor) |= model << SYMBOL_FLAG_TLS_SHIFT;
8010
8011 /* Insert it at index BEGIN. */
8012 vec_safe_insert (block->anchors, begin, anchor);
8013 return anchor;
8014 }
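
/* A worked example, assuming min_anchor_offset == -0x8000 and
   max_anchor_offset == 0x7fff (so RANGE == 0x10000): objects at block
   offsets 0 through 0x7fff share the anchor at offset 0, while an object at
   offset 0x9000 gets the anchor at offset 0x10000 and is addressed as that
   anchor minus 0x7000.  */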
8015
8016 /* Output the objects in BLOCK. */
8017
8018 static void
8019 output_object_block (struct object_block *block)
8020 {
8021 class constant_descriptor_rtx *desc;
8022 unsigned int i;
8023 HOST_WIDE_INT offset;
8024 tree decl;
8025 rtx symbol;
8026
8027 if (!block->objects)
8028 return;
8029
8030 /* Switch to the section and make sure that the first byte is
8031 suitably aligned. */
8032 /* Special case VTV comdat sections similar to assemble_variable. */
8033 if (SECTION_STYLE (block->sect) == SECTION_NAMED
8034 && block->sect->named.name
8035 && (strcmp (block->sect->named.name, ".vtable_map_vars") == 0))
8036 handle_vtv_comdat_section (block->sect, block->sect->named.decl);
8037 else
8038 switch_to_section (block->sect, SYMBOL_REF_DECL ((*block->objects)[0]));
8039
8040 gcc_checking_assert (!(block->sect->common.flags & SECTION_MERGE));
8041 assemble_align (block->alignment);
8042
8043 /* Define the values of all anchors relative to the current section
8044 position. */
8045 FOR_EACH_VEC_SAFE_ELT (block->anchors, i, symbol)
8046 targetm.asm_out.output_anchor (symbol);
8047
8048 /* Output the objects themselves. */
8049 offset = 0;
8050 FOR_EACH_VEC_ELT (*block->objects, i, symbol)
8051 {
8052 /* Move to the object's offset, padding with zeros if necessary. */
8053 assemble_zeros (SYMBOL_REF_BLOCK_OFFSET (symbol) - offset);
8054 offset = SYMBOL_REF_BLOCK_OFFSET (symbol);
8055 if (CONSTANT_POOL_ADDRESS_P (symbol))
8056 {
8057 desc = SYMBOL_REF_CONSTANT (symbol);
8058 /* Pass 1 for align, as we have already laid out everything in the
8059 block, so aligning shouldn't be necessary. */
8060 output_constant_pool_1 (desc, 1);
8061 offset += GET_MODE_SIZE (desc->mode);
8062 }
8063 else if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
8064 {
8065 HOST_WIDE_INT size;
8066 decl = SYMBOL_REF_DECL (symbol);
8067 assemble_constant_contents (DECL_INITIAL (decl), XSTR (symbol, 0),
8068 DECL_ALIGN (decl), false);
8069
8070 size = get_constant_size (DECL_INITIAL (decl));
8071 offset += size;
8072 if ((flag_sanitize & SANITIZE_ADDRESS)
8073 && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST
8074 && asan_protect_global (DECL_INITIAL (decl)))
8075 {
8076 size = asan_red_zone_size (size);
8077 assemble_zeros (size);
8078 offset += size;
8079 }
8080 }
8081 else
8082 {
8083 HOST_WIDE_INT size;
8084 decl = SYMBOL_REF_DECL (symbol);
8085 assemble_variable_contents (decl, XSTR (symbol, 0), false, false);
8086 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
8087 offset += size;
8088 if ((flag_sanitize & SANITIZE_ADDRESS)
8089 && asan_protect_global (decl))
8090 {
8091 size = asan_red_zone_size (size);
8092 assemble_zeros (size);
8093 offset += size;
8094 }
8095 }
8096 }
8097 }
8098
8099 /* A callback for qsort to compare object_blocks. */
8100
8101 static int
8102 output_object_block_compare (const void *x, const void *y)
8103 {
8104 object_block *p1 = *(object_block * const*)x;
8105 object_block *p2 = *(object_block * const*)y;
8106
8107 if (p1->sect->common.flags & SECTION_NAMED
8108 && !(p2->sect->common.flags & SECTION_NAMED))
8109 return 1;
8110
8111 if (!(p1->sect->common.flags & SECTION_NAMED)
8112 && p2->sect->common.flags & SECTION_NAMED)
8113 return -1;
8114
8115 if (p1->sect->common.flags & SECTION_NAMED
8116 && p2->sect->common.flags & SECTION_NAMED)
8117 return strcmp (p1->sect->named.name, p2->sect->named.name);
8118
8119 unsigned f1 = p1->sect->common.flags;
8120 unsigned f2 = p2->sect->common.flags;
8121 if (f1 == f2)
8122 return 0;
8123 return f1 < f2 ? -1 : 1;
8124 }
8125
8126 /* Output the definitions of all object_blocks. */
8127
8128 void
8129 output_object_blocks (void)
8130 {
8131 vec<object_block *, va_heap> v;
8132 v.create (object_block_htab->elements ());
8133 object_block *obj;
8134 hash_table<object_block_hasher>::iterator hi;
8135
8136 FOR_EACH_HASH_TABLE_ELEMENT (*object_block_htab, obj, object_block *, hi)
8137 v.quick_push (obj);
8138
8139 /* Sort them in order to output them in a deterministic manner,
8140 otherwise we may get .rodata sections in different orders with
8141 and without -g. */
8142 v.qsort (output_object_block_compare);
8143 unsigned i;
8144 FOR_EACH_VEC_ELT (v, i, obj)
8145 output_object_block (obj);
8146
8147 v.release ();
8148 }
8149
8150 /* This function provides a possible implementation of the
8151 TARGET_ASM_RECORD_GCC_SWITCHES target hook for ELF targets. When triggered
8152 by -frecord-gcc-switches it creates a new mergeable string section in the
8153 assembler output file, named by TARGET_ASM_RECORD_GCC_SWITCHES_SECTION,
8154 which contains the switches in ASCII format.
8155
8156 FIXME: This code does not correctly handle double quote characters
8157 that appear inside strings (it strips them rather than preserving them).
8158 FIXME: ASM_OUTPUT_ASCII, as defined in config/elfos.h, will not emit NUL
8159 characters - instead it treats them as sub-string separators. Since
8160 we want to emit NUL string terminators into the object file we have to use
8161 ASM_OUTPUT_SKIP. */
8162
8163 void
8164 elf_record_gcc_switches (const char *options)
8165 {
8166 section *sec = get_section (targetm.asm_out.record_gcc_switches_section,
8167 SECTION_DEBUG | SECTION_MERGE
8168 | SECTION_STRINGS | (SECTION_ENTSIZE & 1), NULL);
8169 switch_to_section (sec);
8170 ASM_OUTPUT_ASCII (asm_out_file, options, strlen (options) + 1);
8171 }
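
/* Illustrative usage: compiling with -frecord-gcc-switches stores the option
   string, NUL-terminated, in the section named by
   targetm.asm_out.record_gcc_switches_section (conventionally
   .GNU.command.line on ELF targets), where it can later be inspected with
   e.g. "readelf -p".  */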
8172
8173 /* Emit text to declare externally defined symbols. It is needed to
8174 properly support non-default visibility. */
8175 void
8176 default_elf_asm_output_external (FILE *file ATTRIBUTE_UNUSED,
8177 tree decl,
8178 const char *name ATTRIBUTE_UNUSED)
8179 {
8180 /* We output the name if and only if TREE_SYMBOL_REFERENCED is
8181 set in order to avoid putting out names that are never really
8182 used. Always output visibility specified in the source. */
8183 if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
8184 && (DECL_VISIBILITY_SPECIFIED (decl)
8185 || targetm.binds_local_p (decl)))
8186 maybe_assemble_visibility (decl);
8187 }
8188
8189 /* The default hook for TARGET_ASM_OUTPUT_SOURCE_FILENAME. */
8190
8191 void
8192 default_asm_output_source_filename (FILE *file, const char *name)
8193 {
8194 #ifdef ASM_OUTPUT_SOURCE_FILENAME
8195 ASM_OUTPUT_SOURCE_FILENAME (file, name);
8196 #else
8197 fprintf (file, "\t.file\t");
8198 output_quoted_string (file, name);
8199 putc ('\n', file);
8200 #endif
8201 }
8202
8203 /* Output a file name in the form wanted by System V. */
8204
8205 void
8206 output_file_directive (FILE *asm_file, const char *input_name)
8207 {
8208 int len;
8209 const char *na;
8210
8211 if (input_name == NULL)
8212 input_name = "<stdin>";
8213 else
8214 input_name = remap_debug_filename (input_name);
8215
8216 len = strlen (input_name);
8217 na = input_name + len;
8218
8219 /* NA gets INPUT_NAME sans directory names. */
8220 while (na > input_name)
8221 {
8222 if (IS_DIR_SEPARATOR (na[-1]))
8223 break;
8224 na--;
8225 }
8226
8227 targetm.asm_out.output_source_filename (asm_file, na);
8228 }
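
/* For example, for INPUT_NAME "/home/user/src/foo.c" the loop above leaves
   NA pointing at "foo.c", and the default hook then emits
        .file   "foo.c"  */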
8229
8230 /* Create a DEBUG_EXPR_DECL / DEBUG_EXPR pair from RTL expression
8231 EXP. */
8232 rtx
8233 make_debug_expr_from_rtl (const_rtx exp)
8234 {
8235 tree ddecl = make_node (DEBUG_EXPR_DECL), type;
8236 machine_mode mode = GET_MODE (exp);
8237 rtx dval;
8238
8239 DECL_ARTIFICIAL (ddecl) = 1;
8240 if (REG_P (exp) && REG_EXPR (exp))
8241 type = TREE_TYPE (REG_EXPR (exp));
8242 else if (MEM_P (exp) && MEM_EXPR (exp))
8243 type = TREE_TYPE (MEM_EXPR (exp));
8244 else
8245 type = NULL_TREE;
8246 if (type && TYPE_MODE (type) == mode)
8247 TREE_TYPE (ddecl) = type;
8248 else
8249 TREE_TYPE (ddecl) = lang_hooks.types.type_for_mode (mode, 1);
8250 SET_DECL_MODE (ddecl, mode);
8251 dval = gen_rtx_DEBUG_EXPR (mode);
8252 DEBUG_EXPR_TREE_DECL (dval) = ddecl;
8253 SET_DECL_RTL (ddecl, dval);
8254 return dval;
8255 }
8256
8257 #ifdef ELF_ASCII_ESCAPES
8258 /* Default ASM_OUTPUT_LIMITED_STRING for ELF targets. */
8259
8260 void
8261 default_elf_asm_output_limited_string (FILE *f, const char *s)
8262 {
8263 int escape;
8264 unsigned char c;
8265
8266 fputs (STRING_ASM_OP, f);
8267 putc ('"', f);
8268 while (*s != '\0')
8269 {
8270 c = *s;
8271 escape = ELF_ASCII_ESCAPES[c];
8272 switch (escape)
8273 {
8274 case 0:
8275 putc (c, f);
8276 break;
8277 case 1:
8278 putc ('\\', f);
8279 putc ('0'+((c>>6)&7), f);
8280 putc ('0'+((c>>3)&7), f);
8281 putc ('0'+(c&7), f);
8282 break;
8283 default:
8284 putc ('\\', f);
8285 putc (escape, f);
8286 break;
8287 }
8288 s++;
8289 }
8290 putc ('\"', f);
8291 putc ('\n', f);
8292 }
8293
8294 /* Default ASM_OUTPUT_ASCII for ELF targets. */
8295
8296 void
8297 default_elf_asm_output_ascii (FILE *f, const char *s, unsigned int len)
8298 {
8299 const char *limit = s + len;
8300 const char *last_null = NULL;
8301 unsigned bytes_in_chunk = 0;
8302 unsigned char c;
8303 int escape;
8304
8305 for (; s < limit; s++)
8306 {
8307 const char *p;
8308
8309 if (bytes_in_chunk >= 60)
8310 {
8311 putc ('\"', f);
8312 putc ('\n', f);
8313 bytes_in_chunk = 0;
8314 }
8315
8316 if (s > last_null)
8317 {
8318 for (p = s; p < limit && *p != '\0'; p++)
8319 continue;
8320 last_null = p;
8321 }
8322 else
8323 p = last_null;
8324
8325 if (p < limit && (p - s) <= (long) ELF_STRING_LIMIT)
8326 {
8327 if (bytes_in_chunk > 0)
8328 {
8329 putc ('\"', f);
8330 putc ('\n', f);
8331 bytes_in_chunk = 0;
8332 }
8333
8334 default_elf_asm_output_limited_string (f, s);
8335 s = p;
8336 }
8337 else
8338 {
8339 if (bytes_in_chunk == 0)
8340 fputs (ASCII_DATA_ASM_OP "\"", f);
8341
8342 c = *s;
8343 escape = ELF_ASCII_ESCAPES[c];
8344 switch (escape)
8345 {
8346 case 0:
8347 putc (c, f);
8348 bytes_in_chunk++;
8349 break;
8350 case 1:
8351 putc ('\\', f);
8352 putc ('0'+((c>>6)&7), f);
8353 putc ('0'+((c>>3)&7), f);
8354 putc ('0'+(c&7), f);
8355 bytes_in_chunk += 4;
8356 break;
8357 default:
8358 putc ('\\', f);
8359 putc (escape, f);
8360 bytes_in_chunk += 2;
8361 break;
8362 }
8363
8364 }
8365 }
8366
8367 if (bytes_in_chunk > 0)
8368 {
8369 putc ('\"', f);
8370 putc ('\n', f);
8371 }
8372 }
8373 #endif
8374
8375 static GTY(()) section *elf_init_array_section;
8376 static GTY(()) section *elf_fini_array_section;
8377
8378 static section *
8379 get_elf_initfini_array_priority_section (int priority,
8380 bool constructor_p)
8381 {
8382 section *sec;
8383 if (priority != DEFAULT_INIT_PRIORITY)
8384 {
8385 char buf[18];
8386 sprintf (buf, "%s.%.5u",
8387 constructor_p ? ".init_array" : ".fini_array",
8388 priority);
8389 sec = get_section (buf, SECTION_WRITE | SECTION_NOTYPE, NULL_TREE);
8390 }
8391 else
8392 {
8393 if (constructor_p)
8394 {
8395 if (elf_init_array_section == NULL)
8396 elf_init_array_section
8397 = get_section (".init_array",
8398 SECTION_WRITE | SECTION_NOTYPE, NULL_TREE);
8399 sec = elf_init_array_section;
8400 }
8401 else
8402 {
8403 if (elf_fini_array_section == NULL)
8404 elf_fini_array_section
8405 = get_section (".fini_array",
8406 SECTION_WRITE | SECTION_NOTYPE, NULL_TREE);
8407 sec = elf_fini_array_section;
8408 }
8409 }
8410 return sec;
8411 }
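
/* For example, a PRIORITY of 101 yields the section name ".init_array.00101"
   for constructors and ".fini_array.00101" for destructors, while
   DEFAULT_INIT_PRIORITY selects the plain .init_array or .fini_array
   section.  */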
8412
8413 /* Use .init_array section for constructors. */
8414
8415 void
8416 default_elf_init_array_asm_out_constructor (rtx symbol, int priority)
8417 {
8418 section *sec = get_elf_initfini_array_priority_section (priority,
8419 true);
8420 assemble_addr_to_section (symbol, sec);
8421 }
8422
8423 /* Use .fini_array section for destructors. */
8424
8425 void
8426 default_elf_fini_array_asm_out_destructor (rtx symbol, int priority)
8427 {
8428 section *sec = get_elf_initfini_array_priority_section (priority,
8429 false);
8430 assemble_addr_to_section (symbol, sec);
8431 }
8432
8433 /* Default TARGET_ASM_OUTPUT_IDENT hook.
8434
8435 This is a bit of a cheat. The real default is a no-op, but this
8436 hook is the default for all targets with a .ident directive. */
8437
8438 void
8439 default_asm_output_ident_directive (const char *ident_str)
8440 {
8441 const char *ident_asm_op = "\t.ident\t";
8442
8443 /* If we are still in the front end, do not write out the string
8444 to asm_out_file. Instead, add a fake top-level asm statement.
8445 This allows the front ends to use this hook without actually
8446 writing to asm_out_file, to handle #ident or Pragma Ident. */
8447 if (symtab->state == PARSING)
8448 {
8449 char *buf = ACONCAT ((ident_asm_op, "\"", ident_str, "\"\n", NULL));
8450 symtab->finalize_toplevel_asm (build_string (strlen (buf), buf));
8451 }
8452 else
8453 fprintf (asm_out_file, "%s\"%s\"\n", ident_asm_op, ident_str);
8454 }
8455
8456
8457 /* This function ensures that vtable_map variables are not only
8458 in the comdat section, but that each variable has its own unique
8459 comdat name. Without this the variables end up in the same section
8460 with a single comdat name.
8461
8462 FIXME: resolve_unique_section needs to deal better with
8463 decls with both DECL_SECTION_NAME and DECL_ONE_ONLY. Once
8464 that is fixed, this if-else statement can be replaced with
8465 a single call to "switch_to_section (sect)". */
8466
8467 static void
8468 handle_vtv_comdat_section (section *sect, const_tree decl ATTRIBUTE_UNUSED)
8469 {
8470 #if defined (OBJECT_FORMAT_ELF)
8471 targetm.asm_out.named_section (sect->named.name,
8472 sect->named.common.flags
8473 | SECTION_LINKONCE,
8474 DECL_NAME (decl));
8475 in_section = sect;
8476 #else
8477 /* Neither OBJECT_FORMAT_PE nor OBJECT_FORMAT_COFF is set here.
8478 Therefore the following check is used.
8479 In case the target is PE or COFF, a comdat group section
8480 is created, e.g. .vtable_map_vars$foo. The linker places
8481 everything in .vtable_map_vars at the end.
8482
8483 A fix could be made in
8484 gcc/config/i386/winnt.c: i386_pe_unique_section. */
8485 if (TARGET_PECOFF)
8486 {
8487 char *name;
8488
8489 if (TREE_CODE (DECL_NAME (decl)) == IDENTIFIER_NODE)
8490 name = ACONCAT ((sect->named.name, "$",
8491 IDENTIFIER_POINTER (DECL_NAME (decl)), NULL));
8492 else
8493 name = ACONCAT ((sect->named.name, "$",
8494 IDENTIFIER_POINTER (DECL_COMDAT_GROUP (DECL_NAME (decl))),
8495 NULL));
8496
8497 targetm.asm_out.named_section (name,
8498 sect->named.common.flags
8499 | SECTION_LINKONCE,
8500 DECL_NAME (decl));
8501 in_section = sect;
8502 }
8503 else
8504 switch_to_section (sect);
8505 #endif
8506 }
8507
8508 #include "gt-varasm.h"