1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2020 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47 information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105 static bool is_redundant_typedef (const_tree);
106
107 #ifndef XCOFF_DEBUGGING_INFO
108 #define XCOFF_DEBUGGING_INFO 0
109 #endif
110
111 #ifndef HAVE_XCOFF_DWARF_EXTRAS
112 #define HAVE_XCOFF_DWARF_EXTRAS 0
113 #endif
114
115 #ifdef VMS_DEBUGGING_INFO
116 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
117
118 /* Define this macro to be a nonzero value if the directory specifications
119 which are output in the debug info should end with a separator. */
120 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
121 /* Define this macro to evaluate to a nonzero value if GCC should refrain
122 from generating indirect strings in DWARF2 debug information, for instance
123 if your target is stuck with an old version of GDB that is unable to
124 process them properly or uses VMS Debug. */
125 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
126 #else
127 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
128 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
129 #endif
130
131 /* ??? Poison these here until it can be done generically. They've been
132 totally replaced in this file; make sure it stays that way. */
133 #undef DWARF2_UNWIND_INFO
134 #undef DWARF2_FRAME_INFO
135 #if (GCC_VERSION >= 3000)
136 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
137 #endif
138
139 /* The size of the target's pointer type. */
140 #ifndef PTR_SIZE
141 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
142 #endif
143
144 /* Array of RTXes referenced by the debugging information, which therefore
145 must be kept around forever. */
146 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
147
148 /* A pointer to the base of a list of incomplete types which might be
149 completed at some later time. incomplete_types_list needs to be a
150 vec<tree, va_gc> * because we want to tell the garbage collector about
151 it. */
152 static GTY(()) vec<tree, va_gc> *incomplete_types;
153
154 /* Pointers to various DWARF2 sections. */
155 static GTY(()) section *debug_info_section;
156 static GTY(()) section *debug_skeleton_info_section;
157 static GTY(()) section *debug_abbrev_section;
158 static GTY(()) section *debug_skeleton_abbrev_section;
159 static GTY(()) section *debug_aranges_section;
160 static GTY(()) section *debug_addr_section;
161 static GTY(()) section *debug_macinfo_section;
162 static const char *debug_macinfo_section_name;
163 static unsigned macinfo_label_base = 1;
164 static GTY(()) section *debug_line_section;
165 static GTY(()) section *debug_skeleton_line_section;
166 static GTY(()) section *debug_loc_section;
167 static GTY(()) section *debug_pubnames_section;
168 static GTY(()) section *debug_pubtypes_section;
169 static GTY(()) section *debug_str_section;
170 static GTY(()) section *debug_line_str_section;
171 static GTY(()) section *debug_str_dwo_section;
172 static GTY(()) section *debug_str_offsets_section;
173 static GTY(()) section *debug_ranges_section;
174 static GTY(()) section *debug_frame_section;
175
176 /* Maximum size (in bytes) of an artificially generated label. */
177 #define MAX_ARTIFICIAL_LABEL_BYTES 40
178
179 /* According to the (draft) DWARF 3 specification, the initial length
180 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
181 bytes are 0xffffffff, followed by the length stored in the next 8
182 bytes.
183
184 However, the SGI/MIPS ABI uses an initial length which is equal to
185 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
186
187 #ifndef DWARF_INITIAL_LENGTH_SIZE
188 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
189 #endif
190
191 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
192 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
193 #endif
194
195 /* Round SIZE up to the nearest BOUNDARY. */
196 #define DWARF_ROUND(SIZE,BOUNDARY) \
197 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
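
/* As a worked example of the macro above: DWARF_ROUND (10, 4) and
   DWARF_ROUND (12, 4) both evaluate to 12, i.e. sizes are padded up to
   the next multiple of BOUNDARY and exact multiples are left alone.  */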
198
199 /* CIE identifier. */
200 #if HOST_BITS_PER_WIDE_INT >= 64
201 #define DWARF_CIE_ID \
202 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
203 #else
204 #define DWARF_CIE_ID DW_CIE_ID
205 #endif
206
207
208 /* A vector for a table that contains frame description
209 information for each routine. */
210 #define NOT_INDEXED (-1U)
211 #define NO_INDEX_ASSIGNED (-2U)
212
213 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
214
215 struct GTY((for_user)) indirect_string_node {
216 const char *str;
217 unsigned int refcount;
218 enum dwarf_form form;
219 char *label;
220 unsigned int index;
221 };
222
223 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
224 {
225 typedef const char *compare_type;
226
227 static hashval_t hash (indirect_string_node *);
228 static bool equal (indirect_string_node *, const char *);
229 };
230
231 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
232
233 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
234
235 /* With split_debug_info, both the comp_dir and dwo_name go in the
236 main object file, rather than the dwo, similar to the force_direct
237 parameter elsewhere but with additional complications:
238
239 1) The string is needed in both the main object file and the dwo.
240 That is, the comp_dir and dwo_name will appear in both places.
241
242 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
243 DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.
244
245 3) GCC chooses the form to use late, depending on the size and
246 reference count.
247
248 Rather than forcing all the debug string handling functions and
249 callers to deal with these complications, simply use a separate,
250 special-cased string table for any attribute that should go in the
251 main object file. This limits the complexity to just the places
252 that need it. */
253
254 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
255
256 static GTY(()) int dw2_string_counter;
257
258 /* True if the compilation unit places functions in more than one section. */
259 static GTY(()) bool have_multiple_function_sections = false;
260
261 /* Whether the default text and cold text sections have been used at all. */
262 static GTY(()) bool text_section_used = false;
263 static GTY(()) bool cold_text_section_used = false;
264
265 /* The default cold text section. */
266 static GTY(()) section *cold_text_section;
267
268 /* The DIE for C++14 'auto' in a function return type. */
269 static GTY(()) dw_die_ref auto_die;
270
271 /* The DIE for C++14 'decltype(auto)' in a function return type. */
272 static GTY(()) dw_die_ref decltype_auto_die;
273
274 /* Forward declarations for functions defined in this file. */
275
276 static void output_call_frame_info (int);
277 static void dwarf2out_note_section_used (void);
278
279 /* Personality decl of current unit. Used only when assembler does not support
280 personality CFI. */
281 static GTY(()) rtx current_unit_personality;
282
283 /* Whether an eh_frame section is required. */
284 static GTY(()) bool do_eh_frame = false;
285
286 /* .debug_rnglists next index. */
287 static unsigned int rnglist_idx;
288
289 /* Data and reference forms for relocatable data. */
290 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
291 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
292
293 #ifndef DEBUG_FRAME_SECTION
294 #define DEBUG_FRAME_SECTION ".debug_frame"
295 #endif
296
297 #ifndef FUNC_BEGIN_LABEL
298 #define FUNC_BEGIN_LABEL "LFB"
299 #endif
300
301 #ifndef FUNC_SECOND_SECT_LABEL
302 #define FUNC_SECOND_SECT_LABEL "LFSB"
303 #endif
304
305 #ifndef FUNC_END_LABEL
306 #define FUNC_END_LABEL "LFE"
307 #endif
308
309 #ifndef PROLOGUE_END_LABEL
310 #define PROLOGUE_END_LABEL "LPE"
311 #endif
312
313 #ifndef EPILOGUE_BEGIN_LABEL
314 #define EPILOGUE_BEGIN_LABEL "LEB"
315 #endif
316
317 #ifndef FRAME_BEGIN_LABEL
318 #define FRAME_BEGIN_LABEL "Lframe"
319 #endif
320 #define CIE_AFTER_SIZE_LABEL "LSCIE"
321 #define CIE_END_LABEL "LECIE"
322 #define FDE_LABEL "LSFDE"
323 #define FDE_AFTER_SIZE_LABEL "LASFDE"
324 #define FDE_END_LABEL "LEFDE"
325 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
326 #define LINE_NUMBER_END_LABEL "LELT"
327 #define LN_PROLOG_AS_LABEL "LASLTP"
328 #define LN_PROLOG_END_LABEL "LELTP"
329 #define DIE_LABEL_PREFIX "DW"
330 \f
331 /* Match the base name of a file to the base name of a compilation unit. */
332
333 static int
334 matches_main_base (const char *path)
335 {
336 /* Cache the last query. */
337 static const char *last_path = NULL;
338 static int last_match = 0;
339 if (path != last_path)
340 {
341 const char *base;
342 int length = base_of_path (path, &base);
343 last_path = path;
344 last_match = (length == main_input_baselength
345 && memcmp (base, main_input_basename, length) == 0);
346 }
347 return last_match;
348 }
349
350 #ifdef DEBUG_DEBUG_STRUCT
351
352 static int
353 dump_struct_debug (tree type, enum debug_info_usage usage,
354 enum debug_struct_file criterion, int generic,
355 int matches, int result)
356 {
357 /* Find the type name. */
358 tree type_decl = TYPE_STUB_DECL (type);
359 tree t = type_decl;
360 const char *name = 0;
361 if (TREE_CODE (t) == TYPE_DECL)
362 t = DECL_NAME (t);
363 if (t)
364 name = IDENTIFIER_POINTER (t);
365
366 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
367 criterion,
368 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
369 matches ? "bas" : "hdr",
370 generic ? "gen" : "ord",
371 usage == DINFO_USAGE_DFN ? ";" :
372 usage == DINFO_USAGE_DIR_USE ? "." : "*",
373 result,
374 (void*) type_decl, name);
375 return result;
376 }
377 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
378 dump_struct_debug (type, usage, criterion, generic, matches, result)
379
380 #else
381
382 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
383 (result)
384
385 #endif
386
387 /* Get the number of HOST_WIDE_INTs needed to represent the precision
388 of the number. Some constants have a large uniform precision, so
389 we get the precision needed for the actual value of the number. */
390
391 static unsigned int
392 get_full_len (const wide_int &op)
393 {
394 int prec = wi::min_precision (op, UNSIGNED);
395 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
396 / HOST_BITS_PER_WIDE_INT);
397 }
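
/* For instance, with 64-bit HOST_WIDE_INTs a wide_int whose value needs
   at most 64 significant bits yields 1 from the function above, while a
   value needing 65 bits yields 2, regardless of the (possibly much
   larger) uniform precision the constant was created with.  */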
398
399 static bool
400 should_emit_struct_debug (tree type, enum debug_info_usage usage)
401 {
402 if (debug_info_level <= DINFO_LEVEL_TERSE)
403 return false;
404
405 enum debug_struct_file criterion;
406 tree type_decl;
407 bool generic = lang_hooks.types.generic_p (type);
408
409 if (generic)
410 criterion = debug_struct_generic[usage];
411 else
412 criterion = debug_struct_ordinary[usage];
413
414 if (criterion == DINFO_STRUCT_FILE_NONE)
415 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
416 if (criterion == DINFO_STRUCT_FILE_ANY)
417 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
418
419 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
420
421 if (type_decl != NULL)
422 {
423 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
424 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
425
426 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
427 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
428 }
429
430 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
431 }
432 \f
433 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
434 switch to the data section instead, and write out a synthetic start label
435 for collect2 the first time around. */
436
437 static void
438 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
439 {
440 if (eh_frame_section == 0)
441 {
442 int flags;
443
444 if (EH_TABLES_CAN_BE_READ_ONLY)
445 {
446 int fde_encoding;
447 int per_encoding;
448 int lsda_encoding;
449
450 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
451 /*global=*/0);
452 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
453 /*global=*/1);
454 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
455 /*global=*/0);
456 flags = ((! flag_pic
457 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
458 && (fde_encoding & 0x70) != DW_EH_PE_aligned
459 && (per_encoding & 0x70) != DW_EH_PE_absptr
460 && (per_encoding & 0x70) != DW_EH_PE_aligned
461 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
462 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
463 ? 0 : SECTION_WRITE);
464 }
465 else
466 flags = SECTION_WRITE;
467
468 #ifdef EH_FRAME_SECTION_NAME
469 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
470 #else
471 eh_frame_section = ((flags == SECTION_WRITE)
472 ? data_section : readonly_data_section);
473 #endif /* EH_FRAME_SECTION_NAME */
474 }
475
476 switch_to_section (eh_frame_section);
477
478 #ifdef EH_FRAME_THROUGH_COLLECT2
479 /* We have no special eh_frame section. Emit special labels to guide
480 collect2. */
481 if (!back)
482 {
483 tree label = get_file_function_name ("F");
484 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
485 targetm.asm_out.globalize_label (asm_out_file,
486 IDENTIFIER_POINTER (label));
487 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
488 }
489 #endif
490 }
491
492 /* Switch [BACK] to the eh or debug frame table section, depending on
493 FOR_EH. */
494
495 static void
496 switch_to_frame_table_section (int for_eh, bool back)
497 {
498 if (for_eh)
499 switch_to_eh_frame_section (back);
500 else
501 {
502 if (!debug_frame_section)
503 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
504 SECTION_DEBUG, NULL);
505 switch_to_section (debug_frame_section);
506 }
507 }
508
509 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
510
511 enum dw_cfi_oprnd_type
512 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
513 {
514 switch (cfi)
515 {
516 case DW_CFA_nop:
517 case DW_CFA_GNU_window_save:
518 case DW_CFA_remember_state:
519 case DW_CFA_restore_state:
520 return dw_cfi_oprnd_unused;
521
522 case DW_CFA_set_loc:
523 case DW_CFA_advance_loc1:
524 case DW_CFA_advance_loc2:
525 case DW_CFA_advance_loc4:
526 case DW_CFA_MIPS_advance_loc8:
527 return dw_cfi_oprnd_addr;
528
529 case DW_CFA_offset:
530 case DW_CFA_offset_extended:
531 case DW_CFA_def_cfa:
532 case DW_CFA_offset_extended_sf:
533 case DW_CFA_def_cfa_sf:
534 case DW_CFA_restore:
535 case DW_CFA_restore_extended:
536 case DW_CFA_undefined:
537 case DW_CFA_same_value:
538 case DW_CFA_def_cfa_register:
539 case DW_CFA_register:
540 case DW_CFA_expression:
541 case DW_CFA_val_expression:
542 return dw_cfi_oprnd_reg_num;
543
544 case DW_CFA_def_cfa_offset:
545 case DW_CFA_GNU_args_size:
546 case DW_CFA_def_cfa_offset_sf:
547 return dw_cfi_oprnd_offset;
548
549 case DW_CFA_def_cfa_expression:
550 return dw_cfi_oprnd_loc;
551
552 default:
553 gcc_unreachable ();
554 }
555 }
556
557 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
558
559 enum dw_cfi_oprnd_type
560 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
561 {
562 switch (cfi)
563 {
564 case DW_CFA_def_cfa:
565 case DW_CFA_def_cfa_sf:
566 case DW_CFA_offset:
567 case DW_CFA_offset_extended_sf:
568 case DW_CFA_offset_extended:
569 return dw_cfi_oprnd_offset;
570
571 case DW_CFA_register:
572 return dw_cfi_oprnd_reg_num;
573
574 case DW_CFA_expression:
575 case DW_CFA_val_expression:
576 return dw_cfi_oprnd_loc;
577
578 case DW_CFA_def_cfa_expression:
579 return dw_cfi_oprnd_cfa_loc;
580
581 default:
582 return dw_cfi_oprnd_unused;
583 }
584 }
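
/* For example, the descriptions above classify DW_CFA_offset as taking a
   register number in its first operand and a (data-alignment-factored)
   offset in its second, while DW_CFA_def_cfa_expression carries a
   location expression in its first operand.  */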
585
586 /* Output one FDE. */
587
588 static void
589 output_fde (dw_fde_ref fde, bool for_eh, bool second,
590 char *section_start_label, int fde_encoding, char *augmentation,
591 bool any_lsda_needed, int lsda_encoding)
592 {
593 const char *begin, *end;
594 static unsigned int j;
595 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
596
597 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
598 /* empty */ 0);
599 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
600 for_eh + j);
601 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
602 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
603 if (!XCOFF_DEBUGGING_INFO || for_eh)
604 {
605 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
606 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
607 " indicating 64-bit DWARF extension");
608 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
609 "FDE Length");
610 }
611 ASM_OUTPUT_LABEL (asm_out_file, l1);
612
613 if (for_eh)
614 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
615 else
616 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
617 debug_frame_section, "FDE CIE offset");
618
619 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
620 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
621
622 if (for_eh)
623 {
624 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
625 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
626 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
627 "FDE initial location");
628 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
629 end, begin, "FDE address range");
630 }
631 else
632 {
633 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
634 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
635 }
636
637 if (augmentation[0])
638 {
639 if (any_lsda_needed)
640 {
641 int size = size_of_encoded_value (lsda_encoding);
642
643 if (lsda_encoding == DW_EH_PE_aligned)
644 {
645 int offset = ( 4 /* Length */
646 + 4 /* CIE offset */
647 + 2 * size_of_encoded_value (fde_encoding)
648 + 1 /* Augmentation size */ );
649 int pad = -offset & (PTR_SIZE - 1);
650
651 size += pad;
652 gcc_assert (size_of_uleb128 (size) == 1);
653 }
654
655 dw2_asm_output_data_uleb128 (size, "Augmentation size");
656
657 if (fde->uses_eh_lsda)
658 {
659 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
660 fde->funcdef_number);
661 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
662 gen_rtx_SYMBOL_REF (Pmode, l1),
663 false,
664 "Language Specific Data Area");
665 }
666 else
667 {
668 if (lsda_encoding == DW_EH_PE_aligned)
669 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
670 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
671 "Language Specific Data Area (none)");
672 }
673 }
674 else
675 dw2_asm_output_data_uleb128 (0, "Augmentation size");
676 }
677
678 /* Loop through the Call Frame Instructions associated with this FDE. */
679 fde->dw_fde_current_label = begin;
680 {
681 size_t from, until, i;
682
683 from = 0;
684 until = vec_safe_length (fde->dw_fde_cfi);
685
686 if (fde->dw_fde_second_begin == NULL)
687 ;
688 else if (!second)
689 until = fde->dw_fde_switch_cfi_index;
690 else
691 from = fde->dw_fde_switch_cfi_index;
692
693 for (i = from; i < until; i++)
694 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
695 }
696
697 /* If we are to emit a ref/link from function bodies to their frame tables,
698 do it now. This is typically performed to make sure that tables
699 associated with functions are dragged with them and not discarded in
700 garbage collecting links. We need to do this on a per function basis to
701 cope with -ffunction-sections. */
702
703 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
704 /* Switch to the function section, emit the ref to the tables, and
705 switch *back* into the table section. */
706 switch_to_section (function_section (fde->decl));
707 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
708 switch_to_frame_table_section (for_eh, true);
709 #endif
710
711 /* Pad the FDE out to an address sized boundary. */
712 ASM_OUTPUT_ALIGN (asm_out_file,
713 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
714 ASM_OUTPUT_LABEL (asm_out_file, l2);
715
716 j += 2;
717 }
718
719 /* Return true if frame description entry FDE is needed for EH. */
720
721 static bool
722 fde_needed_for_eh_p (dw_fde_ref fde)
723 {
724 if (flag_asynchronous_unwind_tables)
725 return true;
726
727 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
728 return true;
729
730 if (fde->uses_eh_lsda)
731 return true;
732
733 /* If exceptions are enabled, we have collected nothrow info. */
734 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
735 return false;
736
737 return true;
738 }
739
740 /* Output the call frame information used to record information
741 that relates to calculating the frame pointer, and records the
742 location of saved registers. */
743
744 static void
745 output_call_frame_info (int for_eh)
746 {
747 unsigned int i;
748 dw_fde_ref fde;
749 dw_cfi_ref cfi;
750 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
751 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
752 bool any_lsda_needed = false;
753 char augmentation[6];
754 int augmentation_size;
755 int fde_encoding = DW_EH_PE_absptr;
756 int per_encoding = DW_EH_PE_absptr;
757 int lsda_encoding = DW_EH_PE_absptr;
758 int return_reg;
759 rtx personality = NULL;
760 int dw_cie_version;
761
762 /* Don't emit a CIE if there won't be any FDEs. */
763 if (!fde_vec)
764 return;
765
766 /* Nothing to do if the assembler's doing it all. */
767 if (dwarf2out_do_cfi_asm ())
768 return;
769
770 /* If we don't have any functions we'll want to unwind out of, don't emit
771 any EH unwind information. If we make FDEs linkonce, we may have to
772 emit an empty label for an FDE that wouldn't otherwise be emitted. We
773 want to avoid having an FDE kept around when the function it refers to
774 is discarded. Example where this matters: a primary function template
775 in C++ requires EH information, an explicit specialization doesn't. */
776 if (for_eh)
777 {
778 bool any_eh_needed = false;
779
780 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
781 {
782 if (fde->uses_eh_lsda)
783 any_eh_needed = any_lsda_needed = true;
784 else if (fde_needed_for_eh_p (fde))
785 any_eh_needed = true;
786 else if (TARGET_USES_WEAK_UNWIND_INFO)
787 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
788 }
789
790 if (!any_eh_needed)
791 return;
792 }
793
794 /* We're going to be generating comments, so turn on app. */
795 if (flag_debug_asm)
796 app_enable ();
797
798 /* Switch to the proper frame section, first time. */
799 switch_to_frame_table_section (for_eh, false);
800
801 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
802 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
803
804 /* Output the CIE. */
805 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
806 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
807 if (!XCOFF_DEBUGGING_INFO || for_eh)
808 {
809 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
810 dw2_asm_output_data (4, 0xffffffff,
811 "Initial length escape value indicating 64-bit DWARF extension");
812 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
813 "Length of Common Information Entry");
814 }
815 ASM_OUTPUT_LABEL (asm_out_file, l1);
816
817 /* Now that the CIE pointer is PC-relative for EH,
818 use 0 to identify the CIE. */
819 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
820 (for_eh ? 0 : DWARF_CIE_ID),
821 "CIE Identifier Tag");
822
823 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
824 use CIE version 1, unless that would produce incorrect results
825 due to overflowing the return register column. */
826 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
827 dw_cie_version = 1;
828 if (return_reg >= 256 || dwarf_version > 2)
829 dw_cie_version = 3;
830 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
831
832 augmentation[0] = 0;
833 augmentation_size = 0;
834
835 personality = current_unit_personality;
836 if (for_eh)
837 {
838 char *p;
839
840 /* Augmentation:
841 z Indicates that a uleb128 is present to size the
842 augmentation section.
843 L Indicates the encoding (and thus presence) of
844 an LSDA pointer in the FDE augmentation.
845 R Indicates a non-default pointer encoding for
846 FDE code pointers.
847 P Indicates the presence of an encoding + language
848 personality routine in the CIE augmentation. */
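
      /* For example, a CIE for a unit that has a personality routine,
	 uses LSDAs and uses a non-default FDE code pointer encoding gets
	 the augmentation string "zPLR"; the code below then emits the
	 matching data (personality encoding and pointer, LSDA encoding,
	 FDE encoding) in that order.  */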
849
850 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
851 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
852 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
853
854 p = augmentation + 1;
855 if (personality)
856 {
857 *p++ = 'P';
858 augmentation_size += 1 + size_of_encoded_value (per_encoding);
859 assemble_external_libcall (personality);
860 }
861 if (any_lsda_needed)
862 {
863 *p++ = 'L';
864 augmentation_size += 1;
865 }
866 if (fde_encoding != DW_EH_PE_absptr)
867 {
868 *p++ = 'R';
869 augmentation_size += 1;
870 }
871 if (p > augmentation + 1)
872 {
873 augmentation[0] = 'z';
874 *p = '\0';
875 }
876
877 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
878 if (personality && per_encoding == DW_EH_PE_aligned)
879 {
880 int offset = ( 4 /* Length */
881 + 4 /* CIE Id */
882 + 1 /* CIE version */
883 + strlen (augmentation) + 1 /* Augmentation */
884 + size_of_uleb128 (1) /* Code alignment */
885 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
886 + 1 /* RA column */
887 + 1 /* Augmentation size */
888 + 1 /* Personality encoding */ );
889 int pad = -offset & (PTR_SIZE - 1);
890
891 augmentation_size += pad;
892
893 /* Augmentations should be small, so there's scarce need to
894 iterate for a solution. Die if we exceed one uleb128 byte. */
895 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
896 }
897 }
898
899 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
900 if (dw_cie_version >= 4)
901 {
902 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
903 dw2_asm_output_data (1, 0, "CIE Segment Size");
904 }
905 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
906 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
907 "CIE Data Alignment Factor");
908
909 if (dw_cie_version == 1)
910 dw2_asm_output_data (1, return_reg, "CIE RA Column");
911 else
912 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
913
914 if (augmentation[0])
915 {
916 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
917 if (personality)
918 {
919 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
920 eh_data_format_name (per_encoding));
921 dw2_asm_output_encoded_addr_rtx (per_encoding,
922 personality,
923 true, NULL);
924 }
925
926 if (any_lsda_needed)
927 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
928 eh_data_format_name (lsda_encoding));
929
930 if (fde_encoding != DW_EH_PE_absptr)
931 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
932 eh_data_format_name (fde_encoding));
933 }
934
935 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
936 output_cfi (cfi, NULL, for_eh);
937
938 /* Pad the CIE out to an address sized boundary. */
939 ASM_OUTPUT_ALIGN (asm_out_file,
940 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
941 ASM_OUTPUT_LABEL (asm_out_file, l2);
942
943 /* Loop through all of the FDE's. */
944 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
945 {
946 unsigned int k;
947
948 /* Don't emit EH unwind info for leaf functions that don't need it. */
949 if (for_eh && !fde_needed_for_eh_p (fde))
950 continue;
951
952 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
953 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
954 augmentation, any_lsda_needed, lsda_encoding);
955 }
956
957 if (for_eh && targetm.terminate_dw2_eh_frame_info)
958 dw2_asm_output_data (4, 0, "End of Table");
959
960 /* Turn off app to make assembly quicker. */
961 if (flag_debug_asm)
962 app_disable ();
963 }
964
965 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
966
967 static void
968 dwarf2out_do_cfi_startproc (bool second)
969 {
970 int enc;
971 rtx ref;
972
973 fprintf (asm_out_file, "\t.cfi_startproc\n");
974
975 targetm.asm_out.post_cfi_startproc (asm_out_file, current_function_decl);
976
977 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
978 eh unwinders. */
979 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
980 return;
981
982 rtx personality = get_personality_function (current_function_decl);
983
984 if (personality)
985 {
986 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
987 ref = personality;
988
989 /* ??? The GAS support isn't entirely consistent. We have to
990 handle indirect support ourselves, but PC-relative is done
991 in the assembler. Further, the assembler can't handle any
992 of the weirder relocation types. */
993 if (enc & DW_EH_PE_indirect)
994 ref = dw2_force_const_mem (ref, true);
995
996 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
997 output_addr_const (asm_out_file, ref);
998 fputc ('\n', asm_out_file);
999 }
1000
1001 if (crtl->uses_eh_lsda)
1002 {
1003 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
1004
1005 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1006 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1007 current_function_funcdef_no);
1008 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1009 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1010
1011 if (enc & DW_EH_PE_indirect)
1012 ref = dw2_force_const_mem (ref, true);
1013
1014 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1015 output_addr_const (asm_out_file, ref);
1016 fputc ('\n', asm_out_file);
1017 }
1018 }
1019
1020 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1021 this allocation may be done before pass_final. */
1022
1023 dw_fde_ref
1024 dwarf2out_alloc_current_fde (void)
1025 {
1026 dw_fde_ref fde;
1027
1028 fde = ggc_cleared_alloc<dw_fde_node> ();
1029 fde->decl = current_function_decl;
1030 fde->funcdef_number = current_function_funcdef_no;
1031 fde->fde_index = vec_safe_length (fde_vec);
1032 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1033 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1034 fde->nothrow = crtl->nothrow;
1035 fde->drap_reg = INVALID_REGNUM;
1036 fde->vdrap_reg = INVALID_REGNUM;
1037
1038 /* Record the FDE associated with this function. */
1039 cfun->fde = fde;
1040 vec_safe_push (fde_vec, fde);
1041
1042 return fde;
1043 }
1044
1045 /* Output a marker (i.e. a label) for the beginning of a function, before
1046 the prologue. */
1047
1048 void
1049 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1050 unsigned int column ATTRIBUTE_UNUSED,
1051 const char *file ATTRIBUTE_UNUSED)
1052 {
1053 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1054 char * dup_label;
1055 dw_fde_ref fde;
1056 section *fnsec;
1057 bool do_frame;
1058
1059 current_function_func_begin_label = NULL;
1060
1061 do_frame = dwarf2out_do_frame ();
1062
1063 /* ??? current_function_func_begin_label is also used by except.c for
1064 call-site information. We must emit this label if it might be used. */
1065 if (!do_frame
1066 && (!flag_exceptions
1067 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1068 return;
1069
1070 fnsec = function_section (current_function_decl);
1071 switch_to_section (fnsec);
1072 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1073 current_function_funcdef_no);
1074 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1075 current_function_funcdef_no);
1076 dup_label = xstrdup (label);
1077 current_function_func_begin_label = dup_label;
1078
1079 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1080 if (!do_frame)
1081 return;
1082
1083 /* Unlike the debug version, the EH version of frame unwind info is a per-
1084 function setting so we need to record whether we need it for the unit. */
1085 do_eh_frame |= dwarf2out_do_eh_frame ();
1086
1087 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1088 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1089 would include pass_dwarf2_frame. If we've not created the FDE yet,
1090 do so now. */
1091 fde = cfun->fde;
1092 if (fde == NULL)
1093 fde = dwarf2out_alloc_current_fde ();
1094
1095 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1096 fde->dw_fde_begin = dup_label;
1097 fde->dw_fde_current_label = dup_label;
1098 fde->in_std_section = (fnsec == text_section
1099 || (cold_text_section && fnsec == cold_text_section));
1100
1101 /* We only want to output line number information for the genuine dwarf2
1102 prologue case, not the eh frame case. */
1103 #ifdef DWARF2_DEBUGGING_INFO
1104 if (file)
1105 dwarf2out_source_line (line, column, file, 0, true);
1106 #endif
1107
1108 if (dwarf2out_do_cfi_asm ())
1109 dwarf2out_do_cfi_startproc (false);
1110 else
1111 {
1112 rtx personality = get_personality_function (current_function_decl);
1113 if (!current_unit_personality)
1114 current_unit_personality = personality;
1115
1116 /* We cannot keep a current personality per function as without CFI
1117 asm, at the point where we emit the CFI data, there is no current
1118 function anymore. */
1119 if (personality && current_unit_personality != personality)
1120 sorry ("multiple EH personalities are supported only with assemblers "
1121 "supporting %<.cfi_personality%> directive");
1122 }
1123 }
1124
1125 /* Output a marker (i.e. a label) for the end of the generated code
1126 for a function prologue. This gets called *after* the prologue code has
1127 been generated. */
1128
1129 void
1130 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1131 const char *file ATTRIBUTE_UNUSED)
1132 {
1133 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1134
1135 /* Output a label to mark the end of the prologue code generated for this
1136 function. */
1137 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1138 current_function_funcdef_no);
1139 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1140 current_function_funcdef_no);
1141 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1142 }
1143
1144 /* Output a marker (i.e. a label) for the beginning of the generated code
1145 for a function epilogue. This gets called *before* the epilogue code has
1146 been generated. */
1147
1148 void
1149 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1150 const char *file ATTRIBUTE_UNUSED)
1151 {
1152 dw_fde_ref fde = cfun->fde;
1153 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1154
1155 if (fde->dw_fde_vms_begin_epilogue)
1156 return;
1157
1158 /* Output a label to mark the beginning of the epilogue code generated
1159 for this function. */
1160 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1161 current_function_funcdef_no);
1162 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1163 current_function_funcdef_no);
1164 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1165 }
1166
1167 /* Output a marker (i.e. a label) for the absolute end of the generated code
1168 for a function definition. This gets called *after* the epilogue code has
1169 been generated. */
1170
1171 void
1172 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1173 const char *file ATTRIBUTE_UNUSED)
1174 {
1175 dw_fde_ref fde;
1176 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1177
1178 last_var_location_insn = NULL;
1179 cached_next_real_insn = NULL;
1180
1181 if (dwarf2out_do_cfi_asm ())
1182 fprintf (asm_out_file, "\t.cfi_endproc\n");
1183
1184 /* Output a label to mark the endpoint of the code generated for this
1185 function. */
1186 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1187 current_function_funcdef_no);
1188 ASM_OUTPUT_LABEL (asm_out_file, label);
1189 fde = cfun->fde;
1190 gcc_assert (fde != NULL);
1191 if (fde->dw_fde_second_begin == NULL)
1192 fde->dw_fde_end = xstrdup (label);
1193 }
1194
1195 void
1196 dwarf2out_frame_finish (void)
1197 {
1198 /* Output call frame information. */
1199 if (targetm.debug_unwind_info () == UI_DWARF2)
1200 output_call_frame_info (0);
1201
1202 /* Output another copy for the unwinder. */
1203 if (do_eh_frame)
1204 output_call_frame_info (1);
1205 }
1206
1207 /* Note that the current function section is being used for code. */
1208
1209 static void
1210 dwarf2out_note_section_used (void)
1211 {
1212 section *sec = current_function_section ();
1213 if (sec == text_section)
1214 text_section_used = true;
1215 else if (sec == cold_text_section)
1216 cold_text_section_used = true;
1217 }
1218
1219 static void var_location_switch_text_section (void);
1220 static void set_cur_line_info_table (section *);
1221
1222 void
1223 dwarf2out_switch_text_section (void)
1224 {
1225 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1226 section *sect;
1227 dw_fde_ref fde = cfun->fde;
1228
1229 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1230
1231 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_SECOND_SECT_LABEL,
1232 current_function_funcdef_no);
1233
1234 fde->dw_fde_second_begin = ggc_strdup (label);
1235 if (!in_cold_section_p)
1236 {
1237 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1238 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1239 }
1240 else
1241 {
1242 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1243 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1244 }
1245 have_multiple_function_sections = true;
1246
1247 /* There is no need to mark used sections when not debugging. */
1248 if (cold_text_section != NULL)
1249 dwarf2out_note_section_used ();
1250
1251 if (dwarf2out_do_cfi_asm ())
1252 fprintf (asm_out_file, "\t.cfi_endproc\n");
1253
1254 /* Now do the real section switch. */
1255 sect = current_function_section ();
1256 switch_to_section (sect);
1257
1258 fde->second_in_std_section
1259 = (sect == text_section
1260 || (cold_text_section && sect == cold_text_section));
1261
1262 if (dwarf2out_do_cfi_asm ())
1263 dwarf2out_do_cfi_startproc (true);
1264
1265 var_location_switch_text_section ();
1266
1267 if (cold_text_section != NULL)
1268 set_cur_line_info_table (sect);
1269 }
1270 \f
1271 /* And now, the subset of the debugging information support code necessary
1272 for emitting location expressions. */
1273
1274 /* Data about a single source file. */
1275 struct GTY((for_user)) dwarf_file_data {
1276 const char * filename;
1277 int emitted_number;
1278 };
1279
1280 /* Describe an entry into the .debug_addr section. */
1281
1282 enum ate_kind {
1283 ate_kind_rtx,
1284 ate_kind_rtx_dtprel,
1285 ate_kind_label
1286 };
1287
1288 struct GTY((for_user)) addr_table_entry {
1289 enum ate_kind kind;
1290 unsigned int refcount;
1291 unsigned int index;
1292 union addr_table_entry_struct_union
1293 {
1294 rtx GTY ((tag ("0"))) rtl;
1295 char * GTY ((tag ("1"))) label;
1296 }
1297 GTY ((desc ("%1.kind"))) addr;
1298 };
1299
1300 typedef unsigned int var_loc_view;
1301
1302 /* Location lists are ranges + location descriptions for that range,
1303 so you can track variables that are in different places over
1304 their entire life. */
1305 typedef struct GTY(()) dw_loc_list_struct {
1306 dw_loc_list_ref dw_loc_next;
1307 const char *begin; /* Label and addr_entry for start of range */
1308 addr_table_entry *begin_entry;
1309 const char *end; /* Label for end of range */
1310 char *ll_symbol; /* Label for beginning of location list.
1311 Only on head of list. */
1312 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1313 const char *section; /* Section this loclist is relative to */
1314 dw_loc_descr_ref expr;
1315 var_loc_view vbegin, vend;
1316 hashval_t hash;
1317 /* True if all addresses in this and subsequent lists are known to be
1318 resolved. */
1319 bool resolved_addr;
1320 /* True if this list has been replaced by dw_loc_next. */
1321 bool replaced;
1322 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1323 section. */
1324 unsigned char emitted : 1;
1325 /* True if hash field is index rather than hash value. */
1326 unsigned char num_assigned : 1;
1327 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1328 unsigned char offset_emitted : 1;
1329 /* True if note_variable_value_in_expr has been called on it. */
1330 unsigned char noted_variable_value : 1;
1331 /* True if the range should be emitted even if begin and end
1332 are the same. */
1333 bool force;
1334 } dw_loc_list_node;
1335
1336 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1337 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1338
1339 /* Convert a DWARF stack opcode into its string name. */
1340
1341 static const char *
1342 dwarf_stack_op_name (unsigned int op)
1343 {
1344 const char *name = get_DW_OP_name (op);
1345
1346 if (name != NULL)
1347 return name;
1348
1349 return "OP_<unknown>";
1350 }
1351
1352 /* Return TRUE iff we're to output location view lists as a separate
1353 attribute next to the location lists, as an extension compatible
1354 with DWARF 2 and above. */
1355
1356 static inline bool
1357 dwarf2out_locviews_in_attribute ()
1358 {
1359 return debug_variable_location_views == 1;
1360 }
1361
1362 /* Return TRUE iff we're to output location view lists as part of the
1363 location lists, as proposed for standardization after DWARF 5. */
1364
1365 static inline bool
1366 dwarf2out_locviews_in_loclist ()
1367 {
1368 #ifndef DW_LLE_view_pair
1369 return false;
1370 #else
1371 return debug_variable_location_views == -1;
1372 #endif
1373 }
1374
1375 /* Return a pointer to a newly allocated location description. Location
1376 descriptions are simple expression terms that can be strung
1377 together to form more complicated location (address) descriptions. */
1378
1379 static inline dw_loc_descr_ref
1380 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1381 unsigned HOST_WIDE_INT oprnd2)
1382 {
1383 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1384
1385 descr->dw_loc_opc = op;
1386 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1387 descr->dw_loc_oprnd1.val_entry = NULL;
1388 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1389 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1390 descr->dw_loc_oprnd2.val_entry = NULL;
1391 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1392
1393 return descr;
1394 }
1395
1396 /* Add a location description term to a location description expression. */
1397
1398 static inline void
1399 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1400 {
1401 dw_loc_descr_ref *d;
1402
1403 /* Find the end of the chain. */
1404 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1405 ;
1406
1407 *d = descr;
1408 }
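
/* A minimal usage sketch of the two helpers above, kept out of the
   build with #if 0; the helper name is purely illustrative.  It builds
   the two-operation expression "DW_OP_fbreg 16; DW_OP_plus_uconst 8".  */
#if 0
static dw_loc_descr_ref
example_build_loc_expr (void)
{
  /* Start the expression with DW_OP_fbreg 16 ...  */
  dw_loc_descr_ref expr = new_loc_descr (DW_OP_fbreg, 16, 0);

  /* ... then append DW_OP_plus_uconst 8 at the end of the chain.  */
  add_loc_descr (&expr, new_loc_descr (DW_OP_plus_uconst, 8, 0));

  return expr;
}
#endif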
1409
1410 /* Compare two location operands for exact equality. */
1411
1412 static bool
1413 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1414 {
1415 if (a->val_class != b->val_class)
1416 return false;
1417 switch (a->val_class)
1418 {
1419 case dw_val_class_none:
1420 return true;
1421 case dw_val_class_addr:
1422 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1423
1424 case dw_val_class_offset:
1425 case dw_val_class_unsigned_const:
1426 case dw_val_class_const:
1427 case dw_val_class_unsigned_const_implicit:
1428 case dw_val_class_const_implicit:
1429 case dw_val_class_range_list:
1430 /* These are all HOST_WIDE_INT, signed or unsigned. */
1431 return a->v.val_unsigned == b->v.val_unsigned;
1432
1433 case dw_val_class_loc:
1434 return a->v.val_loc == b->v.val_loc;
1435 case dw_val_class_loc_list:
1436 return a->v.val_loc_list == b->v.val_loc_list;
1437 case dw_val_class_view_list:
1438 return a->v.val_view_list == b->v.val_view_list;
1439 case dw_val_class_die_ref:
1440 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1441 case dw_val_class_fde_ref:
1442 return a->v.val_fde_index == b->v.val_fde_index;
1443 case dw_val_class_symview:
1444 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1445 case dw_val_class_lbl_id:
1446 case dw_val_class_lineptr:
1447 case dw_val_class_macptr:
1448 case dw_val_class_loclistsptr:
1449 case dw_val_class_high_pc:
1450 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1451 case dw_val_class_str:
1452 return a->v.val_str == b->v.val_str;
1453 case dw_val_class_flag:
1454 return a->v.val_flag == b->v.val_flag;
1455 case dw_val_class_file:
1456 case dw_val_class_file_implicit:
1457 return a->v.val_file == b->v.val_file;
1458 case dw_val_class_decl_ref:
1459 return a->v.val_decl_ref == b->v.val_decl_ref;
1460
1461 case dw_val_class_const_double:
1462 return (a->v.val_double.high == b->v.val_double.high
1463 && a->v.val_double.low == b->v.val_double.low);
1464
1465 case dw_val_class_wide_int:
1466 return *a->v.val_wide == *b->v.val_wide;
1467
1468 case dw_val_class_vec:
1469 {
1470 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1471 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1472
1473 return (a_len == b_len
1474 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1475 }
1476
1477 case dw_val_class_data8:
1478 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1479
1480 case dw_val_class_vms_delta:
1481 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1482 && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1483
1484 case dw_val_class_discr_value:
1485 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1486 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1487 case dw_val_class_discr_list:
1488 /* It makes no sense comparing two discriminant value lists. */
1489 return false;
1490 }
1491 gcc_unreachable ();
1492 }
1493
1494 /* Compare two location atoms for exact equality. */
1495
1496 static bool
1497 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1498 {
1499 if (a->dw_loc_opc != b->dw_loc_opc)
1500 return false;
1501
1502 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1503 address size, but since we always allocate cleared storage it
1504 should be zero for other types of locations. */
1505 if (a->dtprel != b->dtprel)
1506 return false;
1507
1508 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1509 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1510 }
1511
1512 /* Compare two complete location expressions for exact equality. */
1513
1514 bool
1515 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1516 {
1517 while (1)
1518 {
1519 if (a == b)
1520 return true;
1521 if (a == NULL || b == NULL)
1522 return false;
1523 if (!loc_descr_equal_p_1 (a, b))
1524 return false;
1525
1526 a = a->dw_loc_next;
1527 b = b->dw_loc_next;
1528 }
1529 }
1530
1531
1532 /* Add a constant POLY_OFFSET to a location expression. */
1533
1534 static void
1535 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1536 {
1537 dw_loc_descr_ref loc;
1538 HOST_WIDE_INT *p;
1539
1540 gcc_assert (*list_head != NULL);
1541
1542 if (known_eq (poly_offset, 0))
1543 return;
1544
1545 /* Find the end of the chain. */
1546 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1547 ;
1548
1549 HOST_WIDE_INT offset;
1550 if (!poly_offset.is_constant (&offset))
1551 {
1552 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1553 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1554 return;
1555 }
1556
1557 p = NULL;
1558 if (loc->dw_loc_opc == DW_OP_fbreg
1559 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1560 p = &loc->dw_loc_oprnd1.v.val_int;
1561 else if (loc->dw_loc_opc == DW_OP_bregx)
1562 p = &loc->dw_loc_oprnd2.v.val_int;
1563
1564 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1565 offset. Don't optimize if a signed integer overflow would happen. */
1566 if (p != NULL
1567 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1568 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1569 *p += offset;
1570
1571 else if (offset > 0)
1572 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1573
1574 else
1575 {
1576 loc->dw_loc_next
1577 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1578 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1579 }
1580 }
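
/* For example, adding a constant 8 to an expression that ends in
   DW_OP_fbreg -16 simply rewrites that operation as DW_OP_fbreg -8,
   whereas an expression ending in DW_OP_deref cannot absorb the constant
   and instead gains a trailing DW_OP_plus_uconst 8 (or an explicit
   subtraction for negative offsets).  */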
1581
1582 /* Return a pointer to a newly allocated location description for
1583 REG and OFFSET. */
1584
1585 static inline dw_loc_descr_ref
1586 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1587 {
1588 HOST_WIDE_INT const_offset;
1589 if (offset.is_constant (&const_offset))
1590 {
1591 if (reg <= 31)
1592 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1593 const_offset, 0);
1594 else
1595 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1596 }
1597 else
1598 {
1599 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1600 loc_descr_plus_const (&ret, offset);
1601 return ret;
1602 }
1603 }
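
/* E.g. register 3 with a constant offset of 8 becomes the single
   operation DW_OP_breg3 8, register 40 becomes DW_OP_bregx 40, 8, and a
   non-constant poly_int64 offset is handled by starting from a
   zero-offset descriptor and appending the offset with
   loc_descr_plus_const.  */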
1604
1605 /* Add a constant OFFSET to a location list. */
1606
1607 static void
1608 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1609 {
1610 dw_loc_list_ref d;
1611 for (d = list_head; d != NULL; d = d->dw_loc_next)
1612 loc_descr_plus_const (&d->expr, offset);
1613 }
1614
1615 #define DWARF_REF_SIZE \
1616 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1617
1618 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1619 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1620 DW_FORM_data16 with 128 bits. */
1621 #define DWARF_LARGEST_DATA_FORM_BITS \
1622 (dwarf_version >= 5 ? 128 : 64)
1623
1624 /* Utility inline function for construction of ops that were GNU extensions
1625 before DWARF 5. */
1626 static inline enum dwarf_location_atom
1627 dwarf_OP (enum dwarf_location_atom op)
1628 {
1629 switch (op)
1630 {
1631 case DW_OP_implicit_pointer:
1632 if (dwarf_version < 5)
1633 return DW_OP_GNU_implicit_pointer;
1634 break;
1635
1636 case DW_OP_entry_value:
1637 if (dwarf_version < 5)
1638 return DW_OP_GNU_entry_value;
1639 break;
1640
1641 case DW_OP_const_type:
1642 if (dwarf_version < 5)
1643 return DW_OP_GNU_const_type;
1644 break;
1645
1646 case DW_OP_regval_type:
1647 if (dwarf_version < 5)
1648 return DW_OP_GNU_regval_type;
1649 break;
1650
1651 case DW_OP_deref_type:
1652 if (dwarf_version < 5)
1653 return DW_OP_GNU_deref_type;
1654 break;
1655
1656 case DW_OP_convert:
1657 if (dwarf_version < 5)
1658 return DW_OP_GNU_convert;
1659 break;
1660
1661 case DW_OP_reinterpret:
1662 if (dwarf_version < 5)
1663 return DW_OP_GNU_reinterpret;
1664 break;
1665
1666 case DW_OP_addrx:
1667 if (dwarf_version < 5)
1668 return DW_OP_GNU_addr_index;
1669 break;
1670
1671 case DW_OP_constx:
1672 if (dwarf_version < 5)
1673 return DW_OP_GNU_const_index;
1674 break;
1675
1676 default:
1677 break;
1678 }
1679 return op;
1680 }
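
/* For example, when targeting DWARF 4 (-gdwarf-4), dwarf_OP
   (DW_OP_entry_value) above returns the pre-standard
   DW_OP_GNU_entry_value opcode, whereas for DWARF 5 the standard opcode
   is returned unchanged; dwarf_AT, dwarf_TAG and dwarf_FORM below apply
   the same scheme to attributes, tags and forms.  */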
1681
1682 /* Similarly for attributes. */
1683 static inline enum dwarf_attribute
1684 dwarf_AT (enum dwarf_attribute at)
1685 {
1686 switch (at)
1687 {
1688 case DW_AT_call_return_pc:
1689 if (dwarf_version < 5)
1690 return DW_AT_low_pc;
1691 break;
1692
1693 case DW_AT_call_tail_call:
1694 if (dwarf_version < 5)
1695 return DW_AT_GNU_tail_call;
1696 break;
1697
1698 case DW_AT_call_origin:
1699 if (dwarf_version < 5)
1700 return DW_AT_abstract_origin;
1701 break;
1702
1703 case DW_AT_call_target:
1704 if (dwarf_version < 5)
1705 return DW_AT_GNU_call_site_target;
1706 break;
1707
1708 case DW_AT_call_target_clobbered:
1709 if (dwarf_version < 5)
1710 return DW_AT_GNU_call_site_target_clobbered;
1711 break;
1712
1713 case DW_AT_call_parameter:
1714 if (dwarf_version < 5)
1715 return DW_AT_abstract_origin;
1716 break;
1717
1718 case DW_AT_call_value:
1719 if (dwarf_version < 5)
1720 return DW_AT_GNU_call_site_value;
1721 break;
1722
1723 case DW_AT_call_data_value:
1724 if (dwarf_version < 5)
1725 return DW_AT_GNU_call_site_data_value;
1726 break;
1727
1728 case DW_AT_call_all_calls:
1729 if (dwarf_version < 5)
1730 return DW_AT_GNU_all_call_sites;
1731 break;
1732
1733 case DW_AT_call_all_tail_calls:
1734 if (dwarf_version < 5)
1735 return DW_AT_GNU_all_tail_call_sites;
1736 break;
1737
1738 case DW_AT_dwo_name:
1739 if (dwarf_version < 5)
1740 return DW_AT_GNU_dwo_name;
1741 break;
1742
1743 case DW_AT_addr_base:
1744 if (dwarf_version < 5)
1745 return DW_AT_GNU_addr_base;
1746 break;
1747
1748 default:
1749 break;
1750 }
1751 return at;
1752 }
1753
1754 /* And similarly for tags. */
1755 static inline enum dwarf_tag
1756 dwarf_TAG (enum dwarf_tag tag)
1757 {
1758 switch (tag)
1759 {
1760 case DW_TAG_call_site:
1761 if (dwarf_version < 5)
1762 return DW_TAG_GNU_call_site;
1763 break;
1764
1765 case DW_TAG_call_site_parameter:
1766 if (dwarf_version < 5)
1767 return DW_TAG_GNU_call_site_parameter;
1768 break;
1769
1770 default:
1771 break;
1772 }
1773 return tag;
1774 }
1775
1776 /* And similarly for forms. */
1777 static inline enum dwarf_form
1778 dwarf_FORM (enum dwarf_form form)
1779 {
1780 switch (form)
1781 {
1782 case DW_FORM_addrx:
1783 if (dwarf_version < 5)
1784 return DW_FORM_GNU_addr_index;
1785 break;
1786
1787 case DW_FORM_strx:
1788 if (dwarf_version < 5)
1789 return DW_FORM_GNU_str_index;
1790 break;
1791
1792 default:
1793 break;
1794 }
1795 return form;
1796 }
1797
1798 static unsigned long int get_base_type_offset (dw_die_ref);
1799
1800 /* Return the size of a location descriptor. */
1801
1802 static unsigned long
1803 size_of_loc_descr (dw_loc_descr_ref loc)
1804 {
1805 unsigned long size = 1;
1806
1807 switch (loc->dw_loc_opc)
1808 {
1809 case DW_OP_addr:
1810 size += DWARF2_ADDR_SIZE;
1811 break;
1812 case DW_OP_GNU_addr_index:
1813 case DW_OP_addrx:
1814 case DW_OP_GNU_const_index:
1815 case DW_OP_constx:
1816 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1817 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1818 break;
1819 case DW_OP_const1u:
1820 case DW_OP_const1s:
1821 size += 1;
1822 break;
1823 case DW_OP_const2u:
1824 case DW_OP_const2s:
1825 size += 2;
1826 break;
1827 case DW_OP_const4u:
1828 case DW_OP_const4s:
1829 size += 4;
1830 break;
1831 case DW_OP_const8u:
1832 case DW_OP_const8s:
1833 size += 8;
1834 break;
1835 case DW_OP_constu:
1836 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1837 break;
1838 case DW_OP_consts:
1839 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1840 break;
1841 case DW_OP_pick:
1842 size += 1;
1843 break;
1844 case DW_OP_plus_uconst:
1845 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1846 break;
1847 case DW_OP_skip:
1848 case DW_OP_bra:
1849 size += 2;
1850 break;
1851 case DW_OP_breg0:
1852 case DW_OP_breg1:
1853 case DW_OP_breg2:
1854 case DW_OP_breg3:
1855 case DW_OP_breg4:
1856 case DW_OP_breg5:
1857 case DW_OP_breg6:
1858 case DW_OP_breg7:
1859 case DW_OP_breg8:
1860 case DW_OP_breg9:
1861 case DW_OP_breg10:
1862 case DW_OP_breg11:
1863 case DW_OP_breg12:
1864 case DW_OP_breg13:
1865 case DW_OP_breg14:
1866 case DW_OP_breg15:
1867 case DW_OP_breg16:
1868 case DW_OP_breg17:
1869 case DW_OP_breg18:
1870 case DW_OP_breg19:
1871 case DW_OP_breg20:
1872 case DW_OP_breg21:
1873 case DW_OP_breg22:
1874 case DW_OP_breg23:
1875 case DW_OP_breg24:
1876 case DW_OP_breg25:
1877 case DW_OP_breg26:
1878 case DW_OP_breg27:
1879 case DW_OP_breg28:
1880 case DW_OP_breg29:
1881 case DW_OP_breg30:
1882 case DW_OP_breg31:
1883 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1884 break;
1885 case DW_OP_regx:
1886 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1887 break;
1888 case DW_OP_fbreg:
1889 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1890 break;
1891 case DW_OP_bregx:
1892 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1893 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1894 break;
1895 case DW_OP_piece:
1896 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1897 break;
1898 case DW_OP_bit_piece:
1899 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1900 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1901 break;
1902 case DW_OP_deref_size:
1903 case DW_OP_xderef_size:
1904 size += 1;
1905 break;
1906 case DW_OP_call2:
1907 size += 2;
1908 break;
1909 case DW_OP_call4:
1910 size += 4;
1911 break;
1912 case DW_OP_call_ref:
1913 case DW_OP_GNU_variable_value:
1914 size += DWARF_REF_SIZE;
1915 break;
1916 case DW_OP_implicit_value:
1917 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1918 + loc->dw_loc_oprnd1.v.val_unsigned;
1919 break;
1920 case DW_OP_implicit_pointer:
1921 case DW_OP_GNU_implicit_pointer:
1922 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1923 break;
1924 case DW_OP_entry_value:
1925 case DW_OP_GNU_entry_value:
1926 {
1927 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1928 size += size_of_uleb128 (op_size) + op_size;
1929 break;
1930 }
1931 case DW_OP_const_type:
1932 case DW_OP_GNU_const_type:
1933 {
1934 unsigned long o
1935 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1936 size += size_of_uleb128 (o) + 1;
1937 switch (loc->dw_loc_oprnd2.val_class)
1938 {
1939 case dw_val_class_vec:
1940 size += loc->dw_loc_oprnd2.v.val_vec.length
1941 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1942 break;
1943 case dw_val_class_const:
1944 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1945 break;
1946 case dw_val_class_const_double:
1947 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1948 break;
1949 case dw_val_class_wide_int:
1950 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1951 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1952 break;
1953 default:
1954 gcc_unreachable ();
1955 }
1956 break;
1957 }
1958 case DW_OP_regval_type:
1959 case DW_OP_GNU_regval_type:
1960 {
1961 unsigned long o
1962 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1963 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1964 + size_of_uleb128 (o);
1965 }
1966 break;
1967 case DW_OP_deref_type:
1968 case DW_OP_GNU_deref_type:
1969 {
1970 unsigned long o
1971 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1972 size += 1 + size_of_uleb128 (o);
1973 }
1974 break;
1975 case DW_OP_convert:
1976 case DW_OP_reinterpret:
1977 case DW_OP_GNU_convert:
1978 case DW_OP_GNU_reinterpret:
1979 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1980 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1981 else
1982 {
1983 unsigned long o
1984 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1985 size += size_of_uleb128 (o);
1986 }
1987 break;
1988 case DW_OP_GNU_parameter_ref:
1989 size += 4;
1990 break;
1991 default:
1992 break;
1993 }
1994
1995 return size;
1996 }
1997
1998 /* Return the size of a series of location descriptors. */
1999
2000 unsigned long
2001 size_of_locs (dw_loc_descr_ref loc)
2002 {
2003 dw_loc_descr_ref l;
2004 unsigned long size;
2005
2006 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
2007 field, to avoid writing to a PCH file. */
2008 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2009 {
2010 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
2011 break;
2012 size += size_of_loc_descr (l);
2013 }
2014 if (! l)
2015 return size;
2016
2017 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2018 {
2019 l->dw_loc_addr = size;
2020 size += size_of_loc_descr (l);
2021 }
2022
2023 return size;
2024 }
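
/* A worked example (illustrative only): for the one-descriptor expression
   DW_OP_fbreg -40, size_of_loc_descr counts one byte for the opcode plus
   size_of_sleb128 (-40) == 1 for the operand, so size_of_locs returns 2.
   For DW_OP_plus_uconst 200 the operand needs two ULEB128 bytes (values
   above 127 spill into a second byte), for a total of 3.  */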
2025
2026 /* Return the size of the value in a DW_AT_discr_value attribute. */
2027
2028 static int
2029 size_of_discr_value (dw_discr_value *discr_value)
2030 {
2031 if (discr_value->pos)
2032 return size_of_uleb128 (discr_value->v.uval);
2033 else
2034 return size_of_sleb128 (discr_value->v.sval);
2035 }
2036
2037 /* Return the size of the value in a DW_AT_discr_list attribute. */
2038
2039 static int
2040 size_of_discr_list (dw_discr_list_ref discr_list)
2041 {
2042 int size = 0;
2043
2044 for (dw_discr_list_ref list = discr_list;
2045 list != NULL;
2046 list = list->dw_discr_next)
2047 {
2048 /* One byte for the discriminant value descriptor, and then one or two
2049 LEB128 numbers, depending on whether it's a single case label or a
2050 range label. */
2051 size += 1;
2052 size += size_of_discr_value (&list->dw_discr_lower_bound);
2053 if (list->dw_discr_range != 0)
2054 size += size_of_discr_value (&list->dw_discr_upper_bound);
2055 }
2056 return size;
2057 }
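
/* For example (a sketch): a discriminant list with one single-value entry
   for 3 and one range entry for 10 .. 300 (unsigned encodings) takes
   1 + size_of_uleb128 (3) == 2 bytes for the first entry and
   1 + size_of_uleb128 (10) + size_of_uleb128 (300) == 4 for the second,
   so size_of_discr_list returns 6.  */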
2058
2059 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2060 static void get_ref_die_offset_label (char *, dw_die_ref);
2061 static unsigned long int get_ref_die_offset (dw_die_ref);
2062
2063 /* Output location description stack opcode's operands (if any).
2064 The for_eh_or_skip parameter controls whether register numbers are
2065 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2066 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2067 info). This should be suppressed for the cases that have not been converted
2068 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2069
2070 static void
2071 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2072 {
2073 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2074 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2075
2076 switch (loc->dw_loc_opc)
2077 {
2078 #ifdef DWARF2_DEBUGGING_INFO
2079 case DW_OP_const2u:
2080 case DW_OP_const2s:
2081 dw2_asm_output_data (2, val1->v.val_int, NULL);
2082 break;
2083 case DW_OP_const4u:
2084 if (loc->dtprel)
2085 {
2086 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2087 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2088 val1->v.val_addr);
2089 fputc ('\n', asm_out_file);
2090 break;
2091 }
2092 /* FALLTHRU */
2093 case DW_OP_const4s:
2094 dw2_asm_output_data (4, val1->v.val_int, NULL);
2095 break;
2096 case DW_OP_const8u:
2097 if (loc->dtprel)
2098 {
2099 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2100 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2101 val1->v.val_addr);
2102 fputc ('\n', asm_out_file);
2103 break;
2104 }
2105 /* FALLTHRU */
2106 case DW_OP_const8s:
2107 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2108 dw2_asm_output_data (8, val1->v.val_int, NULL);
2109 break;
2110 case DW_OP_skip:
2111 case DW_OP_bra:
2112 {
2113 int offset;
2114
2115 gcc_assert (val1->val_class == dw_val_class_loc);
2116 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2117
2118 dw2_asm_output_data (2, offset, NULL);
2119 }
2120 break;
2121 case DW_OP_implicit_value:
2122 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2123 switch (val2->val_class)
2124 {
2125 case dw_val_class_const:
2126 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2127 break;
2128 case dw_val_class_vec:
2129 {
2130 unsigned int elt_size = val2->v.val_vec.elt_size;
2131 unsigned int len = val2->v.val_vec.length;
2132 unsigned int i;
2133 unsigned char *p;
2134
2135 if (elt_size > sizeof (HOST_WIDE_INT))
2136 {
2137 elt_size /= 2;
2138 len *= 2;
2139 }
2140 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2141 i < len;
2142 i++, p += elt_size)
2143 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2144 "fp or vector constant word %u", i);
2145 }
2146 break;
2147 case dw_val_class_const_double:
2148 {
2149 unsigned HOST_WIDE_INT first, second;
2150
2151 if (WORDS_BIG_ENDIAN)
2152 {
2153 first = val2->v.val_double.high;
2154 second = val2->v.val_double.low;
2155 }
2156 else
2157 {
2158 first = val2->v.val_double.low;
2159 second = val2->v.val_double.high;
2160 }
2161 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2162 first, NULL);
2163 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2164 second, NULL);
2165 }
2166 break;
2167 case dw_val_class_wide_int:
2168 {
2169 int i;
2170 int len = get_full_len (*val2->v.val_wide);
2171 if (WORDS_BIG_ENDIAN)
2172 for (i = len - 1; i >= 0; --i)
2173 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2174 val2->v.val_wide->elt (i), NULL);
2175 else
2176 for (i = 0; i < len; ++i)
2177 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2178 val2->v.val_wide->elt (i), NULL);
2179 }
2180 break;
2181 case dw_val_class_addr:
2182 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2183 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2184 break;
2185 default:
2186 gcc_unreachable ();
2187 }
2188 break;
2189 #else
2190 case DW_OP_const2u:
2191 case DW_OP_const2s:
2192 case DW_OP_const4u:
2193 case DW_OP_const4s:
2194 case DW_OP_const8u:
2195 case DW_OP_const8s:
2196 case DW_OP_skip:
2197 case DW_OP_bra:
2198 case DW_OP_implicit_value:
2199 /* We currently don't make any attempt to make sure these are
2200 aligned properly like we do for the main unwind info, so
2201 don't support emitting things larger than a byte if we're
2202 only doing unwinding. */
2203 gcc_unreachable ();
2204 #endif
2205 case DW_OP_const1u:
2206 case DW_OP_const1s:
2207 dw2_asm_output_data (1, val1->v.val_int, NULL);
2208 break;
2209 case DW_OP_constu:
2210 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2211 break;
2212 case DW_OP_consts:
2213 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2214 break;
2215 case DW_OP_pick:
2216 dw2_asm_output_data (1, val1->v.val_int, NULL);
2217 break;
2218 case DW_OP_plus_uconst:
2219 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2220 break;
2221 case DW_OP_breg0:
2222 case DW_OP_breg1:
2223 case DW_OP_breg2:
2224 case DW_OP_breg3:
2225 case DW_OP_breg4:
2226 case DW_OP_breg5:
2227 case DW_OP_breg6:
2228 case DW_OP_breg7:
2229 case DW_OP_breg8:
2230 case DW_OP_breg9:
2231 case DW_OP_breg10:
2232 case DW_OP_breg11:
2233 case DW_OP_breg12:
2234 case DW_OP_breg13:
2235 case DW_OP_breg14:
2236 case DW_OP_breg15:
2237 case DW_OP_breg16:
2238 case DW_OP_breg17:
2239 case DW_OP_breg18:
2240 case DW_OP_breg19:
2241 case DW_OP_breg20:
2242 case DW_OP_breg21:
2243 case DW_OP_breg22:
2244 case DW_OP_breg23:
2245 case DW_OP_breg24:
2246 case DW_OP_breg25:
2247 case DW_OP_breg26:
2248 case DW_OP_breg27:
2249 case DW_OP_breg28:
2250 case DW_OP_breg29:
2251 case DW_OP_breg30:
2252 case DW_OP_breg31:
2253 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2254 break;
2255 case DW_OP_regx:
2256 {
2257 unsigned r = val1->v.val_unsigned;
2258 if (for_eh_or_skip >= 0)
2259 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2260 gcc_assert (size_of_uleb128 (r)
2261 == size_of_uleb128 (val1->v.val_unsigned));
2262 dw2_asm_output_data_uleb128 (r, NULL);
2263 }
2264 break;
2265 case DW_OP_fbreg:
2266 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2267 break;
2268 case DW_OP_bregx:
2269 {
2270 unsigned r = val1->v.val_unsigned;
2271 if (for_eh_or_skip >= 0)
2272 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2273 gcc_assert (size_of_uleb128 (r)
2274 == size_of_uleb128 (val1->v.val_unsigned));
2275 dw2_asm_output_data_uleb128 (r, NULL);
2276 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2277 }
2278 break;
2279 case DW_OP_piece:
2280 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2281 break;
2282 case DW_OP_bit_piece:
2283 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2284 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2285 break;
2286 case DW_OP_deref_size:
2287 case DW_OP_xderef_size:
2288 dw2_asm_output_data (1, val1->v.val_int, NULL);
2289 break;
2290
2291 case DW_OP_addr:
2292 if (loc->dtprel)
2293 {
2294 if (targetm.asm_out.output_dwarf_dtprel)
2295 {
2296 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2297 DWARF2_ADDR_SIZE,
2298 val1->v.val_addr);
2299 fputc ('\n', asm_out_file);
2300 }
2301 else
2302 gcc_unreachable ();
2303 }
2304 else
2305 {
2306 #ifdef DWARF2_DEBUGGING_INFO
2307 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2308 #else
2309 gcc_unreachable ();
2310 #endif
2311 }
2312 break;
2313
2314 case DW_OP_GNU_addr_index:
2315 case DW_OP_addrx:
2316 case DW_OP_GNU_const_index:
2317 case DW_OP_constx:
2318 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2319 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2320 "(index into .debug_addr)");
2321 break;
2322
2323 case DW_OP_call2:
2324 case DW_OP_call4:
2325 {
2326 unsigned long die_offset
2327 = get_ref_die_offset (val1->v.val_die_ref.die);
2328 /* Make sure the offset has been computed and that we can encode it as
2329 an operand. */
2330 gcc_assert (die_offset > 0
2331 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2332 ? 0xffff
2333 : 0xffffffff));
2334 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2335 die_offset, NULL);
2336 }
2337 break;
2338
2339 case DW_OP_call_ref:
2340 case DW_OP_GNU_variable_value:
2341 {
2342 char label[MAX_ARTIFICIAL_LABEL_BYTES
2343 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2344 gcc_assert (val1->val_class == dw_val_class_die_ref);
2345 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2346 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2347 }
2348 break;
2349
2350 case DW_OP_implicit_pointer:
2351 case DW_OP_GNU_implicit_pointer:
2352 {
2353 char label[MAX_ARTIFICIAL_LABEL_BYTES
2354 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2355 gcc_assert (val1->val_class == dw_val_class_die_ref);
2356 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2357 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2358 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2359 }
2360 break;
2361
2362 case DW_OP_entry_value:
2363 case DW_OP_GNU_entry_value:
2364 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2365 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2366 break;
2367
2368 case DW_OP_const_type:
2369 case DW_OP_GNU_const_type:
2370 {
2371 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2372 gcc_assert (o);
2373 dw2_asm_output_data_uleb128 (o, NULL);
2374 switch (val2->val_class)
2375 {
2376 case dw_val_class_const:
2377 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2378 dw2_asm_output_data (1, l, NULL);
2379 dw2_asm_output_data (l, val2->v.val_int, NULL);
2380 break;
2381 case dw_val_class_vec:
2382 {
2383 unsigned int elt_size = val2->v.val_vec.elt_size;
2384 unsigned int len = val2->v.val_vec.length;
2385 unsigned int i;
2386 unsigned char *p;
2387
2388 l = len * elt_size;
2389 dw2_asm_output_data (1, l, NULL);
2390 if (elt_size > sizeof (HOST_WIDE_INT))
2391 {
2392 elt_size /= 2;
2393 len *= 2;
2394 }
2395 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2396 i < len;
2397 i++, p += elt_size)
2398 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2399 "fp or vector constant word %u", i);
2400 }
2401 break;
2402 case dw_val_class_const_double:
2403 {
2404 unsigned HOST_WIDE_INT first, second;
2405 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2406
2407 dw2_asm_output_data (1, 2 * l, NULL);
2408 if (WORDS_BIG_ENDIAN)
2409 {
2410 first = val2->v.val_double.high;
2411 second = val2->v.val_double.low;
2412 }
2413 else
2414 {
2415 first = val2->v.val_double.low;
2416 second = val2->v.val_double.high;
2417 }
2418 dw2_asm_output_data (l, first, NULL);
2419 dw2_asm_output_data (l, second, NULL);
2420 }
2421 break;
2422 case dw_val_class_wide_int:
2423 {
2424 int i;
2425 int len = get_full_len (*val2->v.val_wide);
2426 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2427
2428 dw2_asm_output_data (1, len * l, NULL);
2429 if (WORDS_BIG_ENDIAN)
2430 for (i = len - 1; i >= 0; --i)
2431 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2432 else
2433 for (i = 0; i < len; ++i)
2434 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2435 }
2436 break;
2437 default:
2438 gcc_unreachable ();
2439 }
2440 }
2441 break;
2442 case DW_OP_regval_type:
2443 case DW_OP_GNU_regval_type:
2444 {
2445 unsigned r = val1->v.val_unsigned;
2446 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2447 gcc_assert (o);
2448 if (for_eh_or_skip >= 0)
2449 {
2450 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2451 gcc_assert (size_of_uleb128 (r)
2452 == size_of_uleb128 (val1->v.val_unsigned));
2453 }
2454 dw2_asm_output_data_uleb128 (r, NULL);
2455 dw2_asm_output_data_uleb128 (o, NULL);
2456 }
2457 break;
2458 case DW_OP_deref_type:
2459 case DW_OP_GNU_deref_type:
2460 {
2461 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2462 gcc_assert (o);
2463 dw2_asm_output_data (1, val1->v.val_int, NULL);
2464 dw2_asm_output_data_uleb128 (o, NULL);
2465 }
2466 break;
2467 case DW_OP_convert:
2468 case DW_OP_reinterpret:
2469 case DW_OP_GNU_convert:
2470 case DW_OP_GNU_reinterpret:
2471 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2472 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2473 else
2474 {
2475 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2476 gcc_assert (o);
2477 dw2_asm_output_data_uleb128 (o, NULL);
2478 }
2479 break;
2480
2481 case DW_OP_GNU_parameter_ref:
2482 {
2483 unsigned long o;
2484 gcc_assert (val1->val_class == dw_val_class_die_ref);
2485 o = get_ref_die_offset (val1->v.val_die_ref.die);
2486 dw2_asm_output_data (4, o, NULL);
2487 }
2488 break;
2489
2490 default:
2491 /* Other codes have no operands. */
2492 break;
2493 }
2494 }
2495
2496 /* Output a sequence of location operations.
2497 The for_eh_or_skip parameter controls whether register numbers are
2498 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2499 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2500 info). This should be suppressed for the cases that have not been converted
2501 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2502
2503 void
2504 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2505 {
2506 for (; loc != NULL; loc = loc->dw_loc_next)
2507 {
2508 enum dwarf_location_atom opc = loc->dw_loc_opc;
2509 /* Output the opcode. */
2510 if (for_eh_or_skip >= 0
2511 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2512 {
2513 unsigned r = (opc - DW_OP_breg0);
2514 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2515 gcc_assert (r <= 31);
2516 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2517 }
2518 else if (for_eh_or_skip >= 0
2519 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2520 {
2521 unsigned r = (opc - DW_OP_reg0);
2522 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2523 gcc_assert (r <= 31);
2524 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2525 }
2526
2527 dw2_asm_output_data (1, opc,
2528 "%s", dwarf_stack_op_name (opc));
2529
2530 /* Output the operand(s) (if any). */
2531 output_loc_operands (loc, for_eh_or_skip);
2532 }
2533 }
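
/* Illustrative output (a sketch; the exact assembler text depends on the
   target and the dw2_asm_* helpers): for the expression DW_OP_fbreg -16
   the loop above emits roughly

     .byte 0x91  # DW_OP_fbreg
     .sleb128 -16

   with the opcode byte coming from dw2_asm_output_data and the operand
   from output_loc_operands.  */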
2534
2535 /* Output location description stack opcode's operands (if any).
2536 The output is single bytes on a line, suitable for .cfi_escape. */
2537
2538 static void
2539 output_loc_operands_raw (dw_loc_descr_ref loc)
2540 {
2541 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2542 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2543
2544 switch (loc->dw_loc_opc)
2545 {
2546 case DW_OP_addr:
2547 case DW_OP_GNU_addr_index:
2548 case DW_OP_addrx:
2549 case DW_OP_GNU_const_index:
2550 case DW_OP_constx:
2551 case DW_OP_implicit_value:
2552 /* We cannot output addresses in .cfi_escape, only bytes. */
2553 gcc_unreachable ();
2554
2555 case DW_OP_const1u:
2556 case DW_OP_const1s:
2557 case DW_OP_pick:
2558 case DW_OP_deref_size:
2559 case DW_OP_xderef_size:
2560 fputc (',', asm_out_file);
2561 dw2_asm_output_data_raw (1, val1->v.val_int);
2562 break;
2563
2564 case DW_OP_const2u:
2565 case DW_OP_const2s:
2566 fputc (',', asm_out_file);
2567 dw2_asm_output_data_raw (2, val1->v.val_int);
2568 break;
2569
2570 case DW_OP_const4u:
2571 case DW_OP_const4s:
2572 fputc (',', asm_out_file);
2573 dw2_asm_output_data_raw (4, val1->v.val_int);
2574 break;
2575
2576 case DW_OP_const8u:
2577 case DW_OP_const8s:
2578 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2579 fputc (',', asm_out_file);
2580 dw2_asm_output_data_raw (8, val1->v.val_int);
2581 break;
2582
2583 case DW_OP_skip:
2584 case DW_OP_bra:
2585 {
2586 int offset;
2587
2588 gcc_assert (val1->val_class == dw_val_class_loc);
2589 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2590
2591 fputc (',', asm_out_file);
2592 dw2_asm_output_data_raw (2, offset);
2593 }
2594 break;
2595
2596 case DW_OP_regx:
2597 {
2598 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2599 gcc_assert (size_of_uleb128 (r)
2600 == size_of_uleb128 (val1->v.val_unsigned));
2601 fputc (',', asm_out_file);
2602 dw2_asm_output_data_uleb128_raw (r);
2603 }
2604 break;
2605
2606 case DW_OP_constu:
2607 case DW_OP_plus_uconst:
2608 case DW_OP_piece:
2609 fputc (',', asm_out_file);
2610 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2611 break;
2612
2613 case DW_OP_bit_piece:
2614 fputc (',', asm_out_file);
2615 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2616 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2617 break;
2618
2619 case DW_OP_consts:
2620 case DW_OP_breg0:
2621 case DW_OP_breg1:
2622 case DW_OP_breg2:
2623 case DW_OP_breg3:
2624 case DW_OP_breg4:
2625 case DW_OP_breg5:
2626 case DW_OP_breg6:
2627 case DW_OP_breg7:
2628 case DW_OP_breg8:
2629 case DW_OP_breg9:
2630 case DW_OP_breg10:
2631 case DW_OP_breg11:
2632 case DW_OP_breg12:
2633 case DW_OP_breg13:
2634 case DW_OP_breg14:
2635 case DW_OP_breg15:
2636 case DW_OP_breg16:
2637 case DW_OP_breg17:
2638 case DW_OP_breg18:
2639 case DW_OP_breg19:
2640 case DW_OP_breg20:
2641 case DW_OP_breg21:
2642 case DW_OP_breg22:
2643 case DW_OP_breg23:
2644 case DW_OP_breg24:
2645 case DW_OP_breg25:
2646 case DW_OP_breg26:
2647 case DW_OP_breg27:
2648 case DW_OP_breg28:
2649 case DW_OP_breg29:
2650 case DW_OP_breg30:
2651 case DW_OP_breg31:
2652 case DW_OP_fbreg:
2653 fputc (',', asm_out_file);
2654 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2655 break;
2656
2657 case DW_OP_bregx:
2658 {
2659 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2660 gcc_assert (size_of_uleb128 (r)
2661 == size_of_uleb128 (val1->v.val_unsigned));
2662 fputc (',', asm_out_file);
2663 dw2_asm_output_data_uleb128_raw (r);
2664 fputc (',', asm_out_file);
2665 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2666 }
2667 break;
2668
2669 case DW_OP_implicit_pointer:
2670 case DW_OP_entry_value:
2671 case DW_OP_const_type:
2672 case DW_OP_regval_type:
2673 case DW_OP_deref_type:
2674 case DW_OP_convert:
2675 case DW_OP_reinterpret:
2676 case DW_OP_GNU_implicit_pointer:
2677 case DW_OP_GNU_entry_value:
2678 case DW_OP_GNU_const_type:
2679 case DW_OP_GNU_regval_type:
2680 case DW_OP_GNU_deref_type:
2681 case DW_OP_GNU_convert:
2682 case DW_OP_GNU_reinterpret:
2683 case DW_OP_GNU_parameter_ref:
2684 gcc_unreachable ();
2685 break;
2686
2687 default:
2688 /* Other codes have no operands. */
2689 break;
2690 }
2691 }
2692
2693 void
2694 output_loc_sequence_raw (dw_loc_descr_ref loc)
2695 {
2696 while (1)
2697 {
2698 enum dwarf_location_atom opc = loc->dw_loc_opc;
2699 /* Output the opcode. */
2700 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2701 {
2702 unsigned r = (opc - DW_OP_breg0);
2703 r = DWARF2_FRAME_REG_OUT (r, 1);
2704 gcc_assert (r <= 31);
2705 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2706 }
2707 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2708 {
2709 unsigned r = (opc - DW_OP_reg0);
2710 r = DWARF2_FRAME_REG_OUT (r, 1);
2711 gcc_assert (r <= 31);
2712 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2713 }
2714 /* Output the opcode. */
2715 fprintf (asm_out_file, "%#x", opc);
2716 output_loc_operands_raw (loc);
2717
2718 if (!loc->dw_loc_next)
2719 break;
2720 loc = loc->dw_loc_next;
2721
2722 fputc (',', asm_out_file);
2723 }
2724 }
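
/* As a sketch of the raw form (assuming DWARF2_FRAME_REG_OUT leaves the
   register number unchanged): the two-operation expression
   DW_OP_breg7 8; DW_OP_deref comes out as the comma-separated byte list
   "0x77,0x8,0x6", which is the shape a .cfi_escape directive expects.  */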
2725
2726 /* This function builds a dwarf location descriptor sequence from a
2727 dw_cfa_location, adding the given OFFSET to the result of the
2728 expression. */
2729
2730 struct dw_loc_descr_node *
2731 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2732 {
2733 struct dw_loc_descr_node *head, *tmp;
2734
2735 offset += cfa->offset;
2736
2737 if (cfa->indirect)
2738 {
2739 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2740 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2741 head->dw_loc_oprnd1.val_entry = NULL;
2742 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2743 add_loc_descr (&head, tmp);
2744 loc_descr_plus_const (&head, offset);
2745 }
2746 else
2747 head = new_reg_loc_descr (cfa->reg, offset);
2748
2749 return head;
2750 }
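
/* For instance (illustrative): if the CFA is register 7 plus 16 and not
   indirect, build_cfa_loc (cfa, 8) folds the extra 8 into cfa->offset and
   returns the single descriptor DW_OP_breg7 24.  In the indirect case the
   register/base_offset pair is dereferenced first and the combined offset
   is then applied with loc_descr_plus_const.  */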
2751
2752 /* This function builds a dwarf location descriptor sequence for
2753 the address at OFFSET from the CFA when stack is aligned to
2754 ALIGNMENT byte. */
2755
2756 struct dw_loc_descr_node *
2757 build_cfa_aligned_loc (dw_cfa_location *cfa,
2758 poly_int64 offset, HOST_WIDE_INT alignment)
2759 {
2760 struct dw_loc_descr_node *head;
2761 unsigned int dwarf_fp
2762 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2763
2764 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2765 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2766 {
2767 head = new_reg_loc_descr (dwarf_fp, 0);
2768 add_loc_descr (&head, int_loc_descriptor (alignment));
2769 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2770 loc_descr_plus_const (&head, offset);
2771 }
2772 else
2773 head = new_reg_loc_descr (dwarf_fp, offset);
2774 return head;
2775 }
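
/* In the FP-relative case above, the resulting expression is, schematically,
   "push FP; push ALIGNMENT; and; add OFFSET", i.e. the frame pointer value
   is masked with whatever alignment constant the caller passes before the
   offset is applied; otherwise a plain register+offset descriptor is used.  */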
2776 \f
2777 /* And now, the support for symbolic debugging information. */
2778
2779 /* .debug_str support. */
2780
2781 static void dwarf2out_init (const char *);
2782 static void dwarf2out_finish (const char *);
2783 static void dwarf2out_early_finish (const char *);
2784 static void dwarf2out_assembly_start (void);
2785 static void dwarf2out_define (unsigned int, const char *);
2786 static void dwarf2out_undef (unsigned int, const char *);
2787 static void dwarf2out_start_source_file (unsigned, const char *);
2788 static void dwarf2out_end_source_file (unsigned);
2789 static void dwarf2out_function_decl (tree);
2790 static void dwarf2out_begin_block (unsigned, unsigned);
2791 static void dwarf2out_end_block (unsigned, unsigned);
2792 static bool dwarf2out_ignore_block (const_tree);
2793 static void dwarf2out_early_global_decl (tree);
2794 static void dwarf2out_late_global_decl (tree);
2795 static void dwarf2out_type_decl (tree, int);
2796 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2797 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2798 dw_die_ref);
2799 static void dwarf2out_abstract_function (tree);
2800 static void dwarf2out_var_location (rtx_insn *);
2801 static void dwarf2out_inline_entry (tree);
2802 static void dwarf2out_size_function (tree);
2803 static void dwarf2out_begin_function (tree);
2804 static void dwarf2out_end_function (unsigned int);
2805 static void dwarf2out_register_main_translation_unit (tree unit);
2806 static void dwarf2out_set_name (tree, tree);
2807 static void dwarf2out_register_external_die (tree decl, const char *sym,
2808 unsigned HOST_WIDE_INT off);
2809 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2810 unsigned HOST_WIDE_INT *off);
2811
2812 /* The debug hooks structure. */
2813
2814 const struct gcc_debug_hooks dwarf2_debug_hooks =
2815 {
2816 dwarf2out_init,
2817 dwarf2out_finish,
2818 dwarf2out_early_finish,
2819 dwarf2out_assembly_start,
2820 dwarf2out_define,
2821 dwarf2out_undef,
2822 dwarf2out_start_source_file,
2823 dwarf2out_end_source_file,
2824 dwarf2out_begin_block,
2825 dwarf2out_end_block,
2826 dwarf2out_ignore_block,
2827 dwarf2out_source_line,
2828 dwarf2out_begin_prologue,
2829 #if VMS_DEBUGGING_INFO
2830 dwarf2out_vms_end_prologue,
2831 dwarf2out_vms_begin_epilogue,
2832 #else
2833 debug_nothing_int_charstar,
2834 debug_nothing_int_charstar,
2835 #endif
2836 dwarf2out_end_epilogue,
2837 dwarf2out_begin_function,
2838 dwarf2out_end_function, /* end_function */
2839 dwarf2out_register_main_translation_unit,
2840 dwarf2out_function_decl, /* function_decl */
2841 dwarf2out_early_global_decl,
2842 dwarf2out_late_global_decl,
2843 dwarf2out_type_decl, /* type_decl */
2844 dwarf2out_imported_module_or_decl,
2845 dwarf2out_die_ref_for_decl,
2846 dwarf2out_register_external_die,
2847 debug_nothing_tree, /* deferred_inline_function */
2848 /* The DWARF 2 backend tries to reduce debugging bloat by not
2849 emitting the abstract description of inline functions until
2850 something tries to reference them. */
2851 dwarf2out_abstract_function, /* outlining_inline_function */
2852 debug_nothing_rtx_code_label, /* label */
2853 debug_nothing_int, /* handle_pch */
2854 dwarf2out_var_location,
2855 dwarf2out_inline_entry, /* inline_entry */
2856 dwarf2out_size_function, /* size_function */
2857 dwarf2out_switch_text_section,
2858 dwarf2out_set_name,
2859 1, /* start_end_main_source_file */
2860 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2861 };
2862
2863 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2864 {
2865 dwarf2out_init,
2866 debug_nothing_charstar,
2867 debug_nothing_charstar,
2868 dwarf2out_assembly_start,
2869 debug_nothing_int_charstar,
2870 debug_nothing_int_charstar,
2871 debug_nothing_int_charstar,
2872 debug_nothing_int,
2873 debug_nothing_int_int, /* begin_block */
2874 debug_nothing_int_int, /* end_block */
2875 debug_true_const_tree, /* ignore_block */
2876 dwarf2out_source_line, /* source_line */
2877 debug_nothing_int_int_charstar, /* begin_prologue */
2878 debug_nothing_int_charstar, /* end_prologue */
2879 debug_nothing_int_charstar, /* begin_epilogue */
2880 debug_nothing_int_charstar, /* end_epilogue */
2881 debug_nothing_tree, /* begin_function */
2882 debug_nothing_int, /* end_function */
2883 debug_nothing_tree, /* register_main_translation_unit */
2884 debug_nothing_tree, /* function_decl */
2885 debug_nothing_tree, /* early_global_decl */
2886 debug_nothing_tree, /* late_global_decl */
2887 debug_nothing_tree_int, /* type_decl */
2888 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2889 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2890 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2891 debug_nothing_tree, /* deferred_inline_function */
2892 debug_nothing_tree, /* outlining_inline_function */
2893 debug_nothing_rtx_code_label, /* label */
2894 debug_nothing_int, /* handle_pch */
2895 debug_nothing_rtx_insn, /* var_location */
2896 debug_nothing_tree, /* inline_entry */
2897 debug_nothing_tree, /* size_function */
2898 debug_nothing_void, /* switch_text_section */
2899 debug_nothing_tree_tree, /* set_name */
2900 0, /* start_end_main_source_file */
2901 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2902 };
2903 \f
2904 /* NOTE: In the comments in this file, many references are made to
2905 "Debugging Information Entries". This term is abbreviated as `DIE'
2906 throughout the remainder of this file. */
2907
2908 /* An internal representation of the DWARF output is built, and then
2909 walked to generate the DWARF debugging info. The walk of the internal
2910 representation is done after the entire program has been compiled.
2911 The types below are used to describe the internal representation. */
2912
2913 /* Whether to put type DIEs into their own section .debug_types instead
2914 of making them part of the .debug_info section. Only supported for
2915 DWARF v4 or higher, and only when the user hasn't disabled them through
2916 -fno-debug-types-section. It is more efficient to put them in
2917 separate comdat sections since the linker will then be able to
2918 remove duplicates. But not all tools support .debug_types sections
2919 yet. For DWARF v5 or higher .debug_types doesn't exist any more;
2920 a DW_UT_type unit type in the .debug_info section is used instead.
2921 For late LTO debug there should be almost no types emitted, so avoid
2922 enabling -fdebug-types-section there. */
2923
2924 #define use_debug_types (dwarf_version >= 4 \
2925 && flag_debug_types_section \
2926 && !in_lto_p)
2927
2928 /* Various DIE's use offsets relative to the beginning of the
2929 .debug_info section to refer to each other. */
2930
2931 typedef long int dw_offset;
2932
2933 struct comdat_type_node;
2934
2935 /* The entries in the line_info table more-or-less mirror the opcodes
2936 that are used in the real dwarf line table. Arrays of these entries
2937 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2938 supported. */
2939
2940 enum dw_line_info_opcode {
2941 /* Emit DW_LNE_set_address; the operand is the label index. */
2942 LI_set_address,
2943
2944 /* Emit a row to the matrix with the given line. This may be done
2945 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2946 special opcodes. */
2947 LI_set_line,
2948
2949 /* Emit a DW_LNS_set_file. */
2950 LI_set_file,
2951
2952 /* Emit a DW_LNS_set_column. */
2953 LI_set_column,
2954
2955 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2956 LI_negate_stmt,
2957
2958 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2959 LI_set_prologue_end,
2960 LI_set_epilogue_begin,
2961
2962 /* Emit a DW_LNE_set_discriminator. */
2963 LI_set_discriminator,
2964
2965 /* Output a Fixed Advance PC; the target PC is the label index; the
2966 base PC is the previous LI_adv_address or LI_set_address entry.
2967 We only use this when emitting debug views without assembler
2968 support, at explicit user request. Ideally, we should only use
2969 it when the offset might be zero but we can't tell: it's the only
2970 way to maybe change the PC without resetting the view number. */
2971 LI_adv_address
2972 };
2973
2974 typedef struct GTY(()) dw_line_info_struct {
2975 enum dw_line_info_opcode opcode;
2976 unsigned int val;
2977 } dw_line_info_entry;
2978
2979
2980 struct GTY(()) dw_line_info_table {
2981 /* The label that marks the end of this section. */
2982 const char *end_label;
2983
2984 /* The values for the last row of the matrix, as collected in the table.
2985 These are used to minimize the changes to the next row. */
2986 unsigned int file_num;
2987 unsigned int line_num;
2988 unsigned int column_num;
2989 int discrim_num;
2990 bool is_stmt;
2991 bool in_use;
2992
2993 /* This denotes the NEXT view number.
2994
2995 If it is 0, it is known that the NEXT view will be the first view
2996 at the given PC.
2997
2998 If it is -1, we're forcing the view number to be reset, e.g. at a
2999 function entry.
3000
3001 The meaning of other nonzero values depends on whether we're
3002 computing views internally or leaving it for the assembler to do
3003 so. If we're emitting them internally, view denotes the view
3004 number since the last known advance of PC. If we're leaving it
3005 for the assembler, it denotes the LVU label number that we're
3006 going to ask the assembler to assign. */
3007 var_loc_view view;
3008
3009 /* This counts the number of symbolic views emitted in this table
3010 since the latest view reset. Its max value, over all tables,
3011 sets symview_upper_bound. */
3012 var_loc_view symviews_since_reset;
3013
3014 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
3015 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
3016 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
3017 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
3018
3019 vec<dw_line_info_entry, va_gc> *entries;
3020 };
3021
3022 /* This is an upper bound for view numbers that the assembler may
3023 assign to symbolic views output in this translation unit. It is used to
3024 decide how big a field to use to represent view numbers in
3025 symview-classed attributes. */
3026
3027 static var_loc_view symview_upper_bound;
3028
3029 /* If we're keeping track of location views and their reset points, and
3030 INSN is a reset point (i.e., it necessarily advances the PC), mark
3031 the next view in TABLE as reset. */
3032
3033 static void
3034 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
3035 {
3036 if (!debug_internal_reset_location_views)
3037 return;
3038
3039 /* Maybe turn (part of?) this test into a default target hook. */
3040 int reset = 0;
3041
3042 if (targetm.reset_location_view)
3043 reset = targetm.reset_location_view (insn);
3044
3045 if (reset)
3046 ;
3047 else if (JUMP_TABLE_DATA_P (insn))
3048 reset = 1;
3049 else if (GET_CODE (insn) == USE
3050 || GET_CODE (insn) == CLOBBER
3051 || GET_CODE (insn) == ASM_INPUT
3052 || asm_noperands (insn) >= 0)
3053 ;
3054 else if (get_attr_min_length (insn) > 0)
3055 reset = 1;
3056
3057 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3058 RESET_NEXT_VIEW (table->view);
3059 }
3060
3061 /* Each DIE attribute has a field specifying the attribute kind,
3062 a link to the next attribute in the chain, and an attribute value.
3063 Attributes are typically linked below the DIE they modify. */
3064
3065 typedef struct GTY(()) dw_attr_struct {
3066 enum dwarf_attribute dw_attr;
3067 dw_val_node dw_attr_val;
3068 }
3069 dw_attr_node;
3070
3071
3072 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3073 The children of each node form a circular list linked by
3074 die_sib. die_child points to the node *before* the "first" child node. */
3075
3076 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3077 union die_symbol_or_type_node
3078 {
3079 const char * GTY ((tag ("0"))) die_symbol;
3080 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3081 }
3082 GTY ((desc ("%0.comdat_type_p"))) die_id;
3083 vec<dw_attr_node, va_gc> *die_attr;
3084 dw_die_ref die_parent;
3085 dw_die_ref die_child;
3086 dw_die_ref die_sib;
3087 dw_die_ref die_definition; /* ref from a specification to its definition */
3088 dw_offset die_offset;
3089 unsigned long die_abbrev;
3090 int die_mark;
3091 unsigned int decl_id;
3092 enum dwarf_tag die_tag;
3093 /* Die is used and must not be pruned as unused. */
3094 BOOL_BITFIELD die_perennial_p : 1;
3095 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3096 /* For an external ref to die_symbol if die_offset contains an extra
3097 offset to that symbol. */
3098 BOOL_BITFIELD with_offset : 1;
3099 /* Whether this DIE was removed from the DIE tree, for example via
3100 prune_unused_types. The DIE lookup routines do not consider
3101 such DIEs to be present. */
3102 BOOL_BITFIELD removed : 1;
3103 /* Lots of spare bits. */
3104 }
3105 die_node;
3106
3107 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3108 static bool early_dwarf;
3109 static bool early_dwarf_finished;
3110 class set_early_dwarf {
3111 public:
3112 bool saved;
3113 set_early_dwarf () : saved(early_dwarf)
3114 {
3115 gcc_assert (! early_dwarf_finished);
3116 early_dwarf = true;
3117 }
3118 ~set_early_dwarf () { early_dwarf = saved; }
3119 };
3120
3121 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3122 #define FOR_EACH_CHILD(die, c, expr) do { \
3123 c = die->die_child; \
3124 if (c) do { \
3125 c = c->die_sib; \
3126 expr; \
3127 } while (c != die->die_child); \
3128 } while (0)
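
/* Typical use (a sketch; visit_die is a hypothetical callback):

     dw_die_ref c;
     FOR_EACH_CHILD (die, c, visit_die (c));

   EXPR runs once per child, starting with the node after die->die_child
   and stopping once the circular sibling list wraps back around.  */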
3129
3130 /* The pubname structure */
3131
3132 typedef struct GTY(()) pubname_struct {
3133 dw_die_ref die;
3134 const char *name;
3135 }
3136 pubname_entry;
3137
3138
3139 struct GTY(()) dw_ranges {
3140 const char *label;
3141 /* If this is positive, it's a block number, otherwise it's a
3142 bitwise-negated index into dw_ranges_by_label. */
3143 int num;
3144 /* Index for the range list for DW_FORM_rnglistx. */
3145 unsigned int idx : 31;
3146 /* True if this range might be in a different section
3147 from the previous entry. */
3148 unsigned int maybe_new_sec : 1;
3149 };
3150
3151 /* A structure to hold a macinfo entry. */
3152
3153 typedef struct GTY(()) macinfo_struct {
3154 unsigned char code;
3155 unsigned HOST_WIDE_INT lineno;
3156 const char *info;
3157 }
3158 macinfo_entry;
3159
3160
3161 struct GTY(()) dw_ranges_by_label {
3162 const char *begin;
3163 const char *end;
3164 };
3165
3166 /* The comdat type node structure. */
3167 struct GTY(()) comdat_type_node
3168 {
3169 dw_die_ref root_die;
3170 dw_die_ref type_die;
3171 dw_die_ref skeleton_die;
3172 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3173 comdat_type_node *next;
3174 };
3175
3176 /* A list of DIEs for which we can't determine ancestry (parent_die
3177 field) just yet. Later in dwarf2out_finish we will fill in the
3178 missing bits. */
3179 typedef struct GTY(()) limbo_die_struct {
3180 dw_die_ref die;
3181 /* The tree for which this DIE was created. We use this to
3182 determine ancestry later. */
3183 tree created_for;
3184 struct limbo_die_struct *next;
3185 }
3186 limbo_die_node;
3187
3188 typedef struct skeleton_chain_struct
3189 {
3190 dw_die_ref old_die;
3191 dw_die_ref new_die;
3192 struct skeleton_chain_struct *parent;
3193 }
3194 skeleton_chain_node;
3195
3196 /* Define a macro which returns nonzero for a TYPE_DECL which was
3197 implicitly generated for a type.
3198
3199 Note that, unlike the C front-end (which generates a NULL named
3200 TYPE_DECL node for each complete tagged type, each array type,
3201 and each function type node created) the C++ front-end generates
3202 a _named_ TYPE_DECL node for each tagged type node created.
3203 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3204 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3205 front-end, but for each type, tagged or not. */
3206
3207 #define TYPE_DECL_IS_STUB(decl) \
3208 (DECL_NAME (decl) == NULL_TREE \
3209 || (DECL_ARTIFICIAL (decl) \
3210 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3211 /* This is necessary for stub decls that \
3212 appear in nested inline functions. */ \
3213 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3214 && (decl_ultimate_origin (decl) \
3215 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3216
3217 /* Information concerning the compilation unit's programming
3218 language, and compiler version. */
3219
3220 /* Fixed size portion of the DWARF compilation unit header. */
3221 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3222 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3223 + (dwarf_version >= 5 ? 4 : 3))
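
/* Worked out for 32-bit DWARF (DWARF_INITIAL_LENGTH_SIZE == 4,
   DWARF_OFFSET_SIZE == 4): a version 5 CU header is 4 + 4 + 4 == 12 bytes,
   a version 2-4 header is 4 + 4 + 3 == 11; the extra byte in version 5 is
   the unit_type field.  */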
3224
3225 /* Fixed size portion of the DWARF comdat type unit header. */
3226 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3227 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3228 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3229
3230 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3231 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3232 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3233
3234 /* Fixed size portion of public names info. */
3235 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3236
3237 /* Fixed size portion of the address range info. */
3238 #define DWARF_ARANGES_HEADER_SIZE \
3239 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3240 DWARF2_ADDR_SIZE * 2) \
3241 - DWARF_INITIAL_LENGTH_SIZE)
3242
3243 /* Size of padding portion in the address range info. It must be
3244 aligned to twice the pointer size. */
3245 #define DWARF_ARANGES_PAD_SIZE \
3246 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3247 DWARF2_ADDR_SIZE * 2) \
3248 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
3249
3250 /* Use assembler line directives if available. */
3251 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3252 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3253 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3254 #else
3255 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3256 #endif
3257 #endif
3258
3259 /* Use assembler views in line directives if available. */
3260 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3261 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3262 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3263 #else
3264 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3265 #endif
3266 #endif
3267
3268 /* Return true if GCC configure detected assembler support for .loc. */
3269
3270 bool
3271 dwarf2out_default_as_loc_support (void)
3272 {
3273 return DWARF2_ASM_LINE_DEBUG_INFO;
3274 #if (GCC_VERSION >= 3000)
3275 # undef DWARF2_ASM_LINE_DEBUG_INFO
3276 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3277 #endif
3278 }
3279
3280 /* Return true if GCC configure detected assembler support for views
3281 in .loc directives. */
3282
3283 bool
3284 dwarf2out_default_as_locview_support (void)
3285 {
3286 return DWARF2_ASM_VIEW_DEBUG_INFO;
3287 #if (GCC_VERSION >= 3000)
3288 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3289 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3290 #endif
3291 }
3292
3293 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3294 view computation, and it refers to a view identifier for which we
3295 will not emit a label because it is known to map to a view number
3296 zero. We won't allocate the bitmap if we're not using assembler
3297 support for location views, but we have to make the variable
3298 visible for GGC and for code that will be optimized out for lack of
3299 support but that's still parsed and compiled. We could abstract it
3300 out with macros, but it's not worth it. */
3301 static GTY(()) bitmap zero_view_p;
3302
3303 /* Evaluate to TRUE iff N is known to identify the first location view
3304 at its PC. When not using assembler location view computation,
3305 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3306 and the view label numbers recorded in it are the ones known to be
3307 zero. */
3308 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3309 || (N) == (var_loc_view)-1 \
3310 || (zero_view_p \
3311 && bitmap_bit_p (zero_view_p, (N))))
3312
3313 /* Return true iff we're to emit .loc directives for the assembler to
3314 generate line number sections.
3315
3316 When we're not emitting views, all we need from the assembler is
3317 support for .loc directives.
3318
3319 If we are emitting views, we can only use the assembler's .loc
3320 support if it also supports views.
3321
3322 When the compiler is emitting the line number programs and
3323 computing view numbers itself, it resets view numbers at known PC
3324 changes and counts from that, and then it emits view numbers as
3325 literal constants in locviewlists. There are cases in which the
3326 compiler is not sure about PC changes, e.g. when extra alignment is
3327 requested for a label. In these cases, the compiler may not reset
3328 the view counter, and the potential PC advance in the line number
3329 program will use an opcode that does not reset the view counter
3330 even if the PC actually changes, so that compiler and debug info
3331 consumer can keep view numbers in sync.
3332
3333 When the compiler defers view computation to the assembler, it
3334 emits symbolic view numbers in locviewlists, with the exception of
3335 views known to be zero (forced resets, or reset after
3336 compiler-visible PC changes): instead of emitting symbols for
3337 these, we emit literal zero and assert the assembler agrees with
3338 the compiler's assessment. We could use symbolic views everywhere,
3339 instead of special-casing zero views, but then we'd be unable to
3340 optimize out locviewlists that contain only zeros. */
3341
3342 static bool
3343 output_asm_line_debug_info (void)
3344 {
3345 return (dwarf2out_as_loc_support
3346 && (dwarf2out_as_locview_support
3347 || !debug_variable_location_views));
3348 }
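
/* When this returns true, line number (and, if enabled, view) information
   is emitted as assembler directives, e.g. (a sketch -- the exact syntax is
   the assembler's):

     .loc 1 42 7 view .LVU3

   and the assembler builds the .debug_line program; otherwise dwarf2out
   constructs it itself from the dw_line_info_table entries it collects.  */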
3349
3350 /* Minimum line offset in a special line info. opcode.
3351 This value was chosen to give a reasonable range of values. */
3352 #define DWARF_LINE_BASE -10
3353
3354 /* First special line opcode - leave room for the standard opcodes. */
3355 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3356
3357 /* Range of line offsets in a special line info. opcode. */
3358 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
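
/* As a worked example of how these values feed the standard special-opcode
   encoding: DWARF_LINE_OPCODE_BASE is DW_LNS_set_isa + 1 == 13, so
   DWARF_LINE_RANGE is 254 - 13 + 1 == 242, and a row that advances the line
   by 1 with no address advance is encoded as the single special opcode
   (1 - DWARF_LINE_BASE) + 242 * 0 + 13 == 24.  */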
3359
3360 /* Flag that indicates the initial value of the is_stmt_start flag.
3361 In the present implementation, we do not mark any lines as
3362 the beginning of a source statement, because that information
3363 is not made available by the GCC front-end. */
3364 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3365
3366 /* Maximum number of operations per instruction bundle. */
3367 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3368 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3369 #endif
3370
3371 /* This location is used by calc_die_sizes() to keep track of
3372 the offset of each DIE within the .debug_info section. */
3373 static unsigned long next_die_offset;
3374
3375 /* Record the root of the DIE's built for the current compilation unit. */
3376 static GTY(()) dw_die_ref single_comp_unit_die;
3377
3378 /* A list of type DIEs that have been separated into comdat sections. */
3379 static GTY(()) comdat_type_node *comdat_type_list;
3380
3381 /* A list of CU DIEs that have been separated. */
3382 static GTY(()) limbo_die_node *cu_die_list;
3383
3384 /* A list of DIEs with a NULL parent waiting to be relocated. */
3385 static GTY(()) limbo_die_node *limbo_die_list;
3386
3387 /* A list of DIEs for which we may have to generate
3388 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3389 static GTY(()) limbo_die_node *deferred_asm_name;
3390
3391 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3392 {
3393 typedef const char *compare_type;
3394
3395 static hashval_t hash (dwarf_file_data *);
3396 static bool equal (dwarf_file_data *, const char *);
3397 };
3398
3399 /* Filenames referenced by this compilation unit. */
3400 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3401
3402 struct decl_die_hasher : ggc_ptr_hash<die_node>
3403 {
3404 typedef tree compare_type;
3405
3406 static hashval_t hash (die_node *);
3407 static bool equal (die_node *, tree);
3408 };
3409 /* A hash table of references to DIE's that describe declarations.
3410 The key is a DECL_UID() which is a unique number identifying each decl. */
3411 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3412
3413 struct GTY ((for_user)) variable_value_struct {
3414 unsigned int decl_id;
3415 vec<dw_die_ref, va_gc> *dies;
3416 };
3417
3418 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3419 {
3420 typedef tree compare_type;
3421
3422 static hashval_t hash (variable_value_struct *);
3423 static bool equal (variable_value_struct *, tree);
3424 };
3425 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3426 dw_val_class_decl_ref class, indexed by the FUNCTION_DECL which is
3427 the DECL_CONTEXT of the referenced VAR_DECLs. */
3428 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3429
3430 struct block_die_hasher : ggc_ptr_hash<die_struct>
3431 {
3432 static hashval_t hash (die_struct *);
3433 static bool equal (die_struct *, die_struct *);
3434 };
3435
3436 /* A hash table of references to DIE's that describe COMMON blocks.
3437 The key is DECL_UID() ^ die_parent. */
3438 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3439
3440 typedef struct GTY(()) die_arg_entry_struct {
3441 dw_die_ref die;
3442 tree arg;
3443 } die_arg_entry;
3444
3445
3446 /* Node of the variable location list. */
3447 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3448 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3449 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3450 in mode of the EXPR_LIST node and first EXPR_LIST operand
3451 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3452 location or NULL for padding. For larger bitsizes,
3453 mode is 0 and first operand is a CONCAT with bitsize
3454 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3455 NULL as second operand. */
3456 rtx GTY (()) loc;
3457 const char * GTY (()) label;
3458 struct var_loc_node * GTY (()) next;
3459 var_loc_view view;
3460 };
3461
3462 /* Variable location list. */
3463 struct GTY ((for_user)) var_loc_list_def {
3464 struct var_loc_node * GTY (()) first;
3465
3466 /* Pointer to the last but one or last element of the
3467 chained list. If the list is empty, both first and
3468 last are NULL. If the list contains just one node,
3469 or the last node is certainly not redundant, it points
3470 to the last node; otherwise it points to the last but one.
3471 Do not mark it for GC because it is marked through the chain. */
3472 struct var_loc_node * GTY ((skip ("%h"))) last;
3473
3474 /* Pointer to the last element before a section switch;
3475 if NULL, either sections weren't switched or first
3476 is after the section switch. */
3477 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3478
3479 /* DECL_UID of the variable decl. */
3480 unsigned int decl_id;
3481 };
3482 typedef struct var_loc_list_def var_loc_list;
3483
3484 /* Call argument location list. */
3485 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3486 rtx GTY (()) call_arg_loc_note;
3487 const char * GTY (()) label;
3488 tree GTY (()) block;
3489 bool tail_call_p;
3490 rtx GTY (()) symbol_ref;
3491 struct call_arg_loc_node * GTY (()) next;
3492 };
3493
3494
3495 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3496 {
3497 typedef const_tree compare_type;
3498
3499 static hashval_t hash (var_loc_list *);
3500 static bool equal (var_loc_list *, const_tree);
3501 };
3502
3503 /* Table of decl location linked lists. */
3504 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3505
3506 /* Head and tail of call_arg_loc chain. */
3507 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3508 static struct call_arg_loc_node *call_arg_loc_last;
3509
3510 /* Number of call sites in the current function. */
3511 static int call_site_count = -1;
3512 /* Number of tail call sites in the current function. */
3513 static int tail_call_site_count = -1;
3514
3515 /* A cached location list. */
3516 struct GTY ((for_user)) cached_dw_loc_list_def {
3517 /* The DECL_UID of the decl that this entry describes. */
3518 unsigned int decl_id;
3519
3520 /* The cached location list. */
3521 dw_loc_list_ref loc_list;
3522 };
3523 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3524
3525 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3526 {
3527
3528 typedef const_tree compare_type;
3529
3530 static hashval_t hash (cached_dw_loc_list *);
3531 static bool equal (cached_dw_loc_list *, const_tree);
3532 };
3533
3534 /* Table of cached location lists. */
3535 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3536
3537 /* A vector of references to DIE's that are uniquely identified by their tag,
3538 presence/absence of children DIE's, and list of attribute/value pairs. */
3539 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3540
3541 /* A hash map to remember the stack usage for DWARF procedures.  The value
3542 stored is the difference in stack size between just before the DWARF
3543 procedure is invoked and just after it returns.  In other words, for a
3544 DWARF procedure that consumes N stack slots and pushes M, this stores M - N.  */
3545 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
3546
3547 /* A global counter for generating labels for line number data. */
3548 static unsigned int line_info_label_num;
3549
3550 /* The current table to which we should emit line number information
3551 for the current function. This will be set up at the beginning of
3552 assembly for the function. */
3553 static GTY(()) dw_line_info_table *cur_line_info_table;
3554
3555 /* The two default tables of line number info. */
3556 static GTY(()) dw_line_info_table *text_section_line_info;
3557 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3558
3559 /* The set of all non-default tables of line number info. */
3560 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3561
3562 /* A flag telling the pubnames/pubtypes output code whether there is an
3563 info section to refer to.  */
3564 static bool info_section_emitted;
3565
3566 /* A pointer to the base of a table that contains a list of publicly
3567 accessible names. */
3568 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3569
3570 /* A pointer to the base of a table that contains a list of publicly
3571 accessible types. */
3572 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3573
3574 /* A pointer to the base of a table that contains a list of macro
3575 defines/undefines (and file start/end markers). */
3576 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3577
3578 /* True if a .debug_macinfo or .debug_macro section is going to be
3579 emitted.  */
3580 #define have_macinfo \
3581 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3582 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3583 && !macinfo_table->is_empty ())
3584
3585 /* Vector of dies for which we should generate .debug_ranges info. */
3586 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3587
3588 /* Vector of pairs of labels referenced in ranges_table. */
3589 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3590
3591 /* Whether we have location lists that need outputting.  */
3592 static GTY(()) bool have_location_lists;
3593
3594 /* Unique label counter. */
3595 static GTY(()) unsigned int loclabel_num;
3596
3597 /* Unique label counter for point-of-call tables. */
3598 static GTY(()) unsigned int poc_label_num;
3599
3600 /* The last file entry emitted by maybe_emit_file(). */
3601 static GTY(()) struct dwarf_file_data * last_emitted_file;
3602
3603 /* Number of internal labels generated by gen_internal_sym(). */
3604 static GTY(()) int label_num;
3605
3606 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3607
3608 /* Instances of generic types for which we need to generate debug
3609 info that describes their generic parameters and arguments.  That
3610 generation needs to happen once all types are properly laid out so
3611 we do it at the end of compilation. */
3612 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3613
3614 /* Offset from the "steady-state frame pointer" to the frame base,
3615 within the current function. */
3616 static poly_int64 frame_pointer_fb_offset;
3617 static bool frame_pointer_fb_offset_valid;
3618
3619 static vec<dw_die_ref> base_types;
3620
3621 /* Flags representing a set of attribute classes for attributes that
3622 describe a scalar value (bounds, pointers, ...).  */
3623 enum dw_scalar_form
3624 {
3625 dw_scalar_form_constant = 0x01,
3626 dw_scalar_form_exprloc = 0x02,
3627 dw_scalar_form_reference = 0x04
3628 };
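/* These values are bit flags, so a caller describes the set of forms it
   will accept by OR-ing them together.  A minimal illustrative sketch
   (the DIE, bound expression and context names are hypothetical), assuming
   the add_scalar_info interface declared later in this file:  */
#if 0
/* Accept either a constant or a DWARF expression for an array bound.  */
add_scalar_info (subrange_die, DW_AT_upper_bound, bound_expr,
		 dw_scalar_form_constant | dw_scalar_form_exprloc,
		 context);
#endif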
3629
3630 /* Forward declarations for functions defined in this file. */
3631
3632 static int is_pseudo_reg (const_rtx);
3633 static tree type_main_variant (tree);
3634 static int is_tagged_type (const_tree);
3635 static const char *dwarf_tag_name (unsigned);
3636 static const char *dwarf_attr_name (unsigned);
3637 static const char *dwarf_form_name (unsigned);
3638 static tree decl_ultimate_origin (const_tree);
3639 static tree decl_class_context (tree);
3640 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3641 static inline enum dw_val_class AT_class (dw_attr_node *);
3642 static inline unsigned int AT_index (dw_attr_node *);
3643 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3644 static inline unsigned AT_flag (dw_attr_node *);
3645 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3646 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3647 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3648 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3649 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3650 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3651 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3652 unsigned int, unsigned char *);
3653 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3654 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3655 static inline const char *AT_string (dw_attr_node *);
3656 static enum dwarf_form AT_string_form (dw_attr_node *);
3657 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3658 static void add_AT_specification (dw_die_ref, dw_die_ref);
3659 static inline dw_die_ref AT_ref (dw_attr_node *);
3660 static inline int AT_ref_external (dw_attr_node *);
3661 static inline void set_AT_ref_external (dw_attr_node *, int);
3662 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3663 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3664 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3665 dw_loc_list_ref);
3666 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3667 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3668 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3669 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3670 static void remove_addr_table_entry (addr_table_entry *);
3671 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3672 static inline rtx AT_addr (dw_attr_node *);
3673 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3674 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3675 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3676 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3677 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3678 unsigned long, bool);
3679 static inline const char *AT_lbl (dw_attr_node *);
3680 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3681 static const char *get_AT_low_pc (dw_die_ref);
3682 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3683 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3684 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3685 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3686 static bool is_c (void);
3687 static bool is_cxx (void);
3688 static bool is_cxx (const_tree);
3689 static bool is_fortran (void);
3690 static bool is_ada (void);
3691 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3692 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3693 static void add_child_die (dw_die_ref, dw_die_ref);
3694 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3695 static dw_die_ref lookup_type_die (tree);
3696 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3697 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3698 static void equate_type_number_to_die (tree, dw_die_ref);
3699 static dw_die_ref lookup_decl_die (tree);
3700 static var_loc_list *lookup_decl_loc (const_tree);
3701 static void equate_decl_number_to_die (tree, dw_die_ref);
3702 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3703 static void print_spaces (FILE *);
3704 static void print_die (dw_die_ref, FILE *);
3705 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3706 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3707 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3708 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3709 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3710 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3711 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3712 struct md5_ctx *, int *);
3713 struct checksum_attributes;
3714 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3715 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3716 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3717 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3718 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3719 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3720 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3721 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3722 static int is_type_die (dw_die_ref);
3723 static inline bool is_template_instantiation (dw_die_ref);
3724 static int is_declaration_die (dw_die_ref);
3725 static int should_move_die_to_comdat (dw_die_ref);
3726 static dw_die_ref clone_as_declaration (dw_die_ref);
3727 static dw_die_ref clone_die (dw_die_ref);
3728 static dw_die_ref clone_tree (dw_die_ref);
3729 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3730 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3731 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3732 static dw_die_ref generate_skeleton (dw_die_ref);
3733 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3734 dw_die_ref,
3735 dw_die_ref);
3736 static void break_out_comdat_types (dw_die_ref);
3737 static void copy_decls_for_unworthy_types (dw_die_ref);
3738
3739 static void add_sibling_attributes (dw_die_ref);
3740 static void output_location_lists (dw_die_ref);
3741 static int constant_size (unsigned HOST_WIDE_INT);
3742 static unsigned long size_of_die (dw_die_ref);
3743 static void calc_die_sizes (dw_die_ref);
3744 static void calc_base_type_die_sizes (void);
3745 static void mark_dies (dw_die_ref);
3746 static void unmark_dies (dw_die_ref);
3747 static void unmark_all_dies (dw_die_ref);
3748 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3749 static unsigned long size_of_aranges (void);
3750 static enum dwarf_form value_format (dw_attr_node *);
3751 static void output_value_format (dw_attr_node *);
3752 static void output_abbrev_section (void);
3753 static void output_die_abbrevs (unsigned long, dw_die_ref);
3754 static void output_die (dw_die_ref);
3755 static void output_compilation_unit_header (enum dwarf_unit_type);
3756 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3757 static void output_comdat_type_unit (comdat_type_node *, bool);
3758 static const char *dwarf2_name (tree, int);
3759 static void add_pubname (tree, dw_die_ref);
3760 static void add_enumerator_pubname (const char *, dw_die_ref);
3761 static void add_pubname_string (const char *, dw_die_ref);
3762 static void add_pubtype (tree, dw_die_ref);
3763 static void output_pubnames (vec<pubname_entry, va_gc> *);
3764 static void output_aranges (void);
3765 static unsigned int add_ranges (const_tree, bool = false);
3766 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3767 bool *, bool);
3768 static void output_ranges (void);
3769 static dw_line_info_table *new_line_info_table (void);
3770 static void output_line_info (bool);
3771 static void output_file_names (void);
3772 static dw_die_ref base_type_die (tree, bool);
3773 static int is_base_type (tree);
3774 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3775 static int decl_quals (const_tree);
3776 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3777 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3778 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3779 static unsigned int dbx_reg_number (const_rtx);
3780 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3781 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3782 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3783 enum var_init_status);
3784 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3785 enum var_init_status);
3786 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3787 enum var_init_status);
3788 static int is_based_loc (const_rtx);
3789 static bool resolve_one_addr (rtx *);
3790 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3791 enum var_init_status);
3792 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3793 enum var_init_status);
3794 struct loc_descr_context;
3795 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3796 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3797 static dw_loc_list_ref loc_list_from_tree (tree, int,
3798 struct loc_descr_context *);
3799 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3800 struct loc_descr_context *);
3801 static tree field_type (const_tree);
3802 static unsigned int simple_type_align_in_bits (const_tree);
3803 static unsigned int simple_decl_align_in_bits (const_tree);
3804 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3805 struct vlr_context;
3806 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3807 HOST_WIDE_INT *);
3808 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3809 dw_loc_list_ref);
3810 static void add_data_member_location_attribute (dw_die_ref, tree,
3811 struct vlr_context *);
3812 static bool add_const_value_attribute (dw_die_ref, rtx);
3813 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3814 static void insert_wide_int (const wide_int &, unsigned char *, int);
3815 static void insert_float (const_rtx, unsigned char *);
3816 static rtx rtl_for_decl_location (tree);
3817 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3818 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3819 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3820 static void add_name_attribute (dw_die_ref, const char *);
3821 static void add_desc_attribute (dw_die_ref, tree);
3822 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3823 static void add_comp_dir_attribute (dw_die_ref);
3824 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3825 struct loc_descr_context *);
3826 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3827 struct loc_descr_context *);
3828 static void add_subscript_info (dw_die_ref, tree, bool);
3829 static void add_byte_size_attribute (dw_die_ref, tree);
3830 static void add_alignment_attribute (dw_die_ref, tree);
3831 static void add_bit_offset_attribute (dw_die_ref, tree);
3832 static void add_bit_size_attribute (dw_die_ref, tree);
3833 static void add_prototyped_attribute (dw_die_ref, tree);
3834 static void add_abstract_origin_attribute (dw_die_ref, tree);
3835 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3836 static void add_src_coords_attributes (dw_die_ref, tree);
3837 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3838 static void add_discr_value (dw_die_ref, dw_discr_value *);
3839 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3840 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3841 static dw_die_ref scope_die_for (tree, dw_die_ref);
3842 static inline int local_scope_p (dw_die_ref);
3843 static inline int class_scope_p (dw_die_ref);
3844 static inline int class_or_namespace_scope_p (dw_die_ref);
3845 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3846 static void add_calling_convention_attribute (dw_die_ref, tree);
3847 static const char *type_tag (const_tree);
3848 static tree member_declared_type (const_tree);
3849 #if 0
3850 static const char *decl_start_label (tree);
3851 #endif
3852 static void gen_array_type_die (tree, dw_die_ref);
3853 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3854 #if 0
3855 static void gen_entry_point_die (tree, dw_die_ref);
3856 #endif
3857 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3858 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3859 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3860 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3861 static void gen_formal_types_die (tree, dw_die_ref);
3862 static void gen_subprogram_die (tree, dw_die_ref);
3863 static void gen_variable_die (tree, tree, dw_die_ref);
3864 static void gen_const_die (tree, dw_die_ref);
3865 static void gen_label_die (tree, dw_die_ref);
3866 static void gen_lexical_block_die (tree, dw_die_ref);
3867 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3868 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3869 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3870 static dw_die_ref gen_compile_unit_die (const char *);
3871 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3872 static void gen_member_die (tree, dw_die_ref);
3873 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3874 enum debug_info_usage);
3875 static void gen_subroutine_type_die (tree, dw_die_ref);
3876 static void gen_typedef_die (tree, dw_die_ref);
3877 static void gen_type_die (tree, dw_die_ref);
3878 static void gen_block_die (tree, dw_die_ref);
3879 static void decls_for_scope (tree, dw_die_ref, bool = true);
3880 static bool is_naming_typedef_decl (const_tree);
3881 static inline dw_die_ref get_context_die (tree);
3882 static void gen_namespace_die (tree, dw_die_ref);
3883 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3884 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3885 static dw_die_ref force_decl_die (tree);
3886 static dw_die_ref force_type_die (tree);
3887 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3888 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3889 static struct dwarf_file_data * lookup_filename (const char *);
3890 static void retry_incomplete_types (void);
3891 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3892 static void gen_generic_params_dies (tree);
3893 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3894 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3895 static void splice_child_die (dw_die_ref, dw_die_ref);
3896 static int file_info_cmp (const void *, const void *);
3897 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3898 const char *, var_loc_view, const char *);
3899 static void output_loc_list (dw_loc_list_ref);
3900 static char *gen_internal_sym (const char *);
3901 static bool want_pubnames (void);
3902
3903 static void prune_unmark_dies (dw_die_ref);
3904 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3905 static void prune_unused_types_mark (dw_die_ref, int);
3906 static void prune_unused_types_walk (dw_die_ref);
3907 static void prune_unused_types_walk_attribs (dw_die_ref);
3908 static void prune_unused_types_prune (dw_die_ref);
3909 static void prune_unused_types (void);
3910 static int maybe_emit_file (struct dwarf_file_data *fd);
3911 static inline const char *AT_vms_delta1 (dw_attr_node *);
3912 static inline const char *AT_vms_delta2 (dw_attr_node *);
3913 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3914 const char *, const char *);
3915 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3916 static void gen_remaining_tmpl_value_param_die_attribute (void);
3917 static bool generic_type_p (tree);
3918 static void schedule_generic_params_dies_gen (tree t);
3919 static void gen_scheduled_generic_parms_dies (void);
3920 static void resolve_variable_values (void);
3921
3922 static const char *comp_dir_string (void);
3923
3924 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3925
3926 /* enum for tracking thread-local variables whose address is really an offset
3927 relative to the TLS pointer, which will need link-time relocation, but will
3928 not need relocation by the DWARF consumer. */
3929
3930 enum dtprel_bool
3931 {
3932 dtprel_false = 0,
3933 dtprel_true = 1
3934 };
3935
3936 /* Return the operator to use for an address of a variable. For dtprel_true, we
3937 use DW_OP_const*. For regular variables, which need both link-time
3938 relocation and consumer-level relocation (e.g., to account for shared objects
3939 loaded at a random address), we use DW_OP_addr*. */
3940
3941 static inline enum dwarf_location_atom
3942 dw_addr_op (enum dtprel_bool dtprel)
3943 {
3944 if (dtprel == dtprel_true)
3945 return (dwarf_split_debug_info ? dwarf_OP (DW_OP_constx)
3946 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3947 else
3948 return dwarf_split_debug_info ? dwarf_OP (DW_OP_addrx) : DW_OP_addr;
3949 }
3950
3951 /* Return a pointer to a newly allocated address location description. If
3952 dwarf_split_debug_info is true, then record the address with the appropriate
3953 relocation. */
3954 static inline dw_loc_descr_ref
3955 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3956 {
3957 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3958
3959 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3960 ref->dw_loc_oprnd1.v.val_addr = addr;
3961 ref->dtprel = dtprel;
3962 if (dwarf_split_debug_info)
3963 ref->dw_loc_oprnd1.val_entry
3964 = add_addr_table_entry (addr,
3965 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3966 else
3967 ref->dw_loc_oprnd1.val_entry = NULL;
3968
3969 return ref;
3970 }
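/* A minimal illustrative sketch (not part of the build; "sym" is a
   hypothetical rtx): building a location description for the address of a
   global through the helper above.  Without -gsplit-dwarf this produces a
   DW_OP_addr with a direct relocation; with split DWARF it produces a
   DW_OP_addrx plus an entry in the .debug_addr table.  */
#if 0
rtx sym = gen_rtx_SYMBOL_REF (Pmode, "some_global");
dw_loc_descr_ref descr = new_addr_loc_descr (sym, dtprel_false);
#endif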
3971
3972 /* Section names used to hold DWARF debugging information. */
3973
3974 #ifndef DEBUG_INFO_SECTION
3975 #define DEBUG_INFO_SECTION ".debug_info"
3976 #endif
3977 #ifndef DEBUG_DWO_INFO_SECTION
3978 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3979 #endif
3980 #ifndef DEBUG_LTO_INFO_SECTION
3981 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3982 #endif
3983 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3984 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3985 #endif
3986 #ifndef DEBUG_ABBREV_SECTION
3987 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3988 #endif
3989 #ifndef DEBUG_LTO_ABBREV_SECTION
3990 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3991 #endif
3992 #ifndef DEBUG_DWO_ABBREV_SECTION
3993 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3994 #endif
3995 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3996 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3997 #endif
3998 #ifndef DEBUG_ARANGES_SECTION
3999 #define DEBUG_ARANGES_SECTION ".debug_aranges"
4000 #endif
4001 #ifndef DEBUG_ADDR_SECTION
4002 #define DEBUG_ADDR_SECTION ".debug_addr"
4003 #endif
4004 #ifndef DEBUG_MACINFO_SECTION
4005 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
4006 #endif
4007 #ifndef DEBUG_LTO_MACINFO_SECTION
4008 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
4009 #endif
4010 #ifndef DEBUG_DWO_MACINFO_SECTION
4011 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
4012 #endif
4013 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
4014 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
4015 #endif
4016 #ifndef DEBUG_MACRO_SECTION
4017 #define DEBUG_MACRO_SECTION ".debug_macro"
4018 #endif
4019 #ifndef DEBUG_LTO_MACRO_SECTION
4020 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
4021 #endif
4022 #ifndef DEBUG_DWO_MACRO_SECTION
4023 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
4024 #endif
4025 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
4026 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
4027 #endif
4028 #ifndef DEBUG_LINE_SECTION
4029 #define DEBUG_LINE_SECTION ".debug_line"
4030 #endif
4031 #ifndef DEBUG_LTO_LINE_SECTION
4032 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4033 #endif
4034 #ifndef DEBUG_DWO_LINE_SECTION
4035 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4036 #endif
4037 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4038 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4039 #endif
4040 #ifndef DEBUG_LOC_SECTION
4041 #define DEBUG_LOC_SECTION ".debug_loc"
4042 #endif
4043 #ifndef DEBUG_DWO_LOC_SECTION
4044 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4045 #endif
4046 #ifndef DEBUG_LOCLISTS_SECTION
4047 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4048 #endif
4049 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4050 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4051 #endif
4052 #ifndef DEBUG_PUBNAMES_SECTION
4053 #define DEBUG_PUBNAMES_SECTION \
4054 ((debug_generate_pub_sections == 2) \
4055 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4056 #endif
4057 #ifndef DEBUG_PUBTYPES_SECTION
4058 #define DEBUG_PUBTYPES_SECTION \
4059 ((debug_generate_pub_sections == 2) \
4060 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4061 #endif
4062 #ifndef DEBUG_STR_OFFSETS_SECTION
4063 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4064 #endif
4065 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4066 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4067 #endif
4068 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4069 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4070 #endif
4071 #ifndef DEBUG_STR_SECTION
4072 #define DEBUG_STR_SECTION ".debug_str"
4073 #endif
4074 #ifndef DEBUG_LTO_STR_SECTION
4075 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4076 #endif
4077 #ifndef DEBUG_STR_DWO_SECTION
4078 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4079 #endif
4080 #ifndef DEBUG_LTO_STR_DWO_SECTION
4081 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4082 #endif
4083 #ifndef DEBUG_RANGES_SECTION
4084 #define DEBUG_RANGES_SECTION ".debug_ranges"
4085 #endif
4086 #ifndef DEBUG_RNGLISTS_SECTION
4087 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4088 #endif
4089 #ifndef DEBUG_LINE_STR_SECTION
4090 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4091 #endif
4092 #ifndef DEBUG_LTO_LINE_STR_SECTION
4093 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4094 #endif
4095
4096 /* Standard ELF section names for compiled code and data. */
4097 #ifndef TEXT_SECTION_NAME
4098 #define TEXT_SECTION_NAME ".text"
4099 #endif
4100
4101 /* Section flags for .debug_str section. */
4102 #define DEBUG_STR_SECTION_FLAGS \
4103 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4104 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4105 : SECTION_DEBUG)
4106
4107 /* Section flags for .debug_str.dwo section. */
4108 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4109
4110 /* Attribute used to refer to the macro section. */
4111 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4112 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
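/* Concretely: with DWARF 5 or later (-gdwarf-5) this selects the standard
   DW_AT_macros; with an earlier DWARF version under -gstrict-dwarf it falls
   back to DW_AT_macro_info; otherwise the GNU extension DW_AT_GNU_macros
   is used.  */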
4113
4114 /* Labels we insert at the beginning of sections so that we can reference
4115 them instead of the section names themselves.  */
4116
4117 #ifndef TEXT_SECTION_LABEL
4118 #define TEXT_SECTION_LABEL "Ltext"
4119 #endif
4120 #ifndef COLD_TEXT_SECTION_LABEL
4121 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4122 #endif
4123 #ifndef DEBUG_LINE_SECTION_LABEL
4124 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4125 #endif
4126 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4127 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4128 #endif
4129 #ifndef DEBUG_INFO_SECTION_LABEL
4130 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4131 #endif
4132 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4133 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4134 #endif
4135 #ifndef DEBUG_ABBREV_SECTION_LABEL
4136 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4137 #endif
4138 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4139 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4140 #endif
4141 #ifndef DEBUG_ADDR_SECTION_LABEL
4142 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4143 #endif
4144 #ifndef DEBUG_LOC_SECTION_LABEL
4145 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4146 #endif
4147 #ifndef DEBUG_RANGES_SECTION_LABEL
4148 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4149 #endif
4150 #ifndef DEBUG_MACINFO_SECTION_LABEL
4151 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4152 #endif
4153 #ifndef DEBUG_MACRO_SECTION_LABEL
4154 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4155 #endif
4156 #define SKELETON_COMP_DIE_ABBREV 1
4157 #define SKELETON_TYPE_DIE_ABBREV 2
4158
4159 /* Definitions of defaults for formats and names of various special
4160 (artificial) labels which may be generated within this file (when the -g
4161 option is used and DWARF2_DEBUGGING_INFO is in effect).
4162 If necessary, these may be overridden from within the tm.h file, but
4163 typically, overriding these defaults is unnecessary. */
4164
4165 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4166 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4167 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4168 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4169 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4170 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4171 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4172 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4173 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4174 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4175 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4176 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4177 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4178 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4179 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4180
4181 #ifndef TEXT_END_LABEL
4182 #define TEXT_END_LABEL "Letext"
4183 #endif
4184 #ifndef COLD_END_LABEL
4185 #define COLD_END_LABEL "Letext_cold"
4186 #endif
4187 #ifndef BLOCK_BEGIN_LABEL
4188 #define BLOCK_BEGIN_LABEL "LBB"
4189 #endif
4190 #ifndef BLOCK_INLINE_ENTRY_LABEL
4191 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4192 #endif
4193 #ifndef BLOCK_END_LABEL
4194 #define BLOCK_END_LABEL "LBE"
4195 #endif
4196 #ifndef LINE_CODE_LABEL
4197 #define LINE_CODE_LABEL "LM"
4198 #endif
4199
4200 \f
4201 /* Return the root of the DIEs built for the current compilation unit.  */
4202 static dw_die_ref
4203 comp_unit_die (void)
4204 {
4205 if (!single_comp_unit_die)
4206 single_comp_unit_die = gen_compile_unit_die (NULL);
4207 return single_comp_unit_die;
4208 }
4209
4210 /* We allow a language front-end to designate a function that is to be
4211 called to "demangle" any name before it is put into a DIE. */
4212
4213 static const char *(*demangle_name_func) (const char *);
4214
4215 void
4216 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4217 {
4218 demangle_name_func = func;
4219 }
4220
4221 /* Test if rtl node points to a pseudo register. */
4222
4223 static inline int
4224 is_pseudo_reg (const_rtx rtl)
4225 {
4226 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4227 || (GET_CODE (rtl) == SUBREG
4228 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4229 }
4230
4231 /* Return a reference to a type, with its const and volatile qualifiers
4232 removed. */
4233
4234 static inline tree
4235 type_main_variant (tree type)
4236 {
4237 type = TYPE_MAIN_VARIANT (type);
4238
4239 /* ??? There really should be only one main variant among any group of
4240 variants of a given type (and all of the MAIN_VARIANT values for all
4241 members of the group should point to that one type) but sometimes the C
4242 front-end messes this up for array types, so we work around that bug
4243 here. */
4244 if (TREE_CODE (type) == ARRAY_TYPE)
4245 while (type != TYPE_MAIN_VARIANT (type))
4246 type = TYPE_MAIN_VARIANT (type);
4247
4248 return type;
4249 }
4250
4251 /* Return nonzero if the given type node represents a tagged type. */
4252
4253 static inline int
4254 is_tagged_type (const_tree type)
4255 {
4256 enum tree_code code = TREE_CODE (type);
4257
4258 return (code == RECORD_TYPE || code == UNION_TYPE
4259 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4260 }
4261
4262 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4263
4264 static void
4265 get_ref_die_offset_label (char *label, dw_die_ref ref)
4266 {
4267 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4268 }
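/* For example (illustrative only; the exact spelling of
   debug_info_section_label is target dependent), a DIE at offset 1234
   might yield a label such as "Ldebug_info0+1234":  */
#if 0
char buf[2 * MAX_ARTIFICIAL_LABEL_BYTES];
get_ref_die_offset_label (buf, some_die);  /* "some_die" is hypothetical.  */
#endif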
4269
4270 /* Return die_offset of a DIE reference to a base type. */
4271
4272 static unsigned long int
4273 get_base_type_offset (dw_die_ref ref)
4274 {
4275 if (ref->die_offset)
4276 return ref->die_offset;
4277 if (comp_unit_die ()->die_abbrev)
4278 {
4279 calc_base_type_die_sizes ();
4280 gcc_assert (ref->die_offset);
4281 }
4282 return ref->die_offset;
4283 }
4284
4285 /* Return die_offset of a DIE reference other than base type. */
4286
4287 static unsigned long int
4288 get_ref_die_offset (dw_die_ref ref)
4289 {
4290 gcc_assert (ref->die_offset);
4291 return ref->die_offset;
4292 }
4293
4294 /* Convert a DIE tag into its string name. */
4295
4296 static const char *
4297 dwarf_tag_name (unsigned int tag)
4298 {
4299 const char *name = get_DW_TAG_name (tag);
4300
4301 if (name != NULL)
4302 return name;
4303
4304 return "DW_TAG_<unknown>";
4305 }
4306
4307 /* Convert a DWARF attribute code into its string name. */
4308
4309 static const char *
4310 dwarf_attr_name (unsigned int attr)
4311 {
4312 const char *name;
4313
4314 switch (attr)
4315 {
4316 #if VMS_DEBUGGING_INFO
4317 case DW_AT_HP_prologue:
4318 return "DW_AT_HP_prologue";
4319 #else
4320 case DW_AT_MIPS_loop_unroll_factor:
4321 return "DW_AT_MIPS_loop_unroll_factor";
4322 #endif
4323
4324 #if VMS_DEBUGGING_INFO
4325 case DW_AT_HP_epilogue:
4326 return "DW_AT_HP_epilogue";
4327 #else
4328 case DW_AT_MIPS_stride:
4329 return "DW_AT_MIPS_stride";
4330 #endif
4331 }
4332
4333 name = get_DW_AT_name (attr);
4334
4335 if (name != NULL)
4336 return name;
4337
4338 return "DW_AT_<unknown>";
4339 }
4340
4341 /* Convert a DWARF value form code into its string name. */
4342
4343 static const char *
4344 dwarf_form_name (unsigned int form)
4345 {
4346 const char *name = get_DW_FORM_name (form);
4347
4348 if (name != NULL)
4349 return name;
4350
4351 return "DW_FORM_<unknown>";
4352 }
4353 \f
4354 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4355 instance of an inlined instance of a decl which is local to an inline
4356 function, so we have to trace all of the way back through the origin chain
4357 to find out what sort of node actually served as the original seed for the
4358 given block. */
4359
4360 static tree
4361 decl_ultimate_origin (const_tree decl)
4362 {
4363 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4364 return NULL_TREE;
4365
4366 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4367 we're trying to output the abstract instance of this function. */
4368 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4369 return NULL_TREE;
4370
4371 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4372 most distant ancestor, this should never happen. */
4373 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4374
4375 return DECL_ABSTRACT_ORIGIN (decl);
4376 }
4377
4378 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4379 of a virtual function may refer to a base class, so we check the 'this'
4380 parameter. */
4381
4382 static tree
4383 decl_class_context (tree decl)
4384 {
4385 tree context = NULL_TREE;
4386
4387 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4388 context = DECL_CONTEXT (decl);
4389 else
4390 context = TYPE_MAIN_VARIANT
4391 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4392
4393 if (context && !TYPE_P (context))
4394 context = NULL_TREE;
4395
4396 return context;
4397 }
4398 \f
4399 /* Add an attribute/value pair to a DIE. */
4400
4401 static inline void
4402 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4403 {
4404 /* Maybe this should be an assert? */
4405 if (die == NULL)
4406 return;
4407
4408 if (flag_checking)
4409 {
4410 /* Check we do not add duplicate attrs. Can't use get_AT here
4411 because that recurses to the specification/abstract origin DIE. */
4412 dw_attr_node *a;
4413 unsigned ix;
4414 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4415 gcc_assert (a->dw_attr != attr->dw_attr);
4416 }
4417
4418 vec_safe_reserve (die->die_attr, 1);
4419 vec_safe_push (die->die_attr, *attr);
4420 }
4421
4422 static inline enum dw_val_class
4423 AT_class (dw_attr_node *a)
4424 {
4425 return a->dw_attr_val.val_class;
4426 }
4427
4428 /* Return the index for any attribute that will be referenced with a
4429 DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String
4430 indices are stored in dw_attr_val.v.val_str for reference counting
4431 pruning. */
4432
4433 static inline unsigned int
4434 AT_index (dw_attr_node *a)
4435 {
4436 if (AT_class (a) == dw_val_class_str)
4437 return a->dw_attr_val.v.val_str->index;
4438 else if (a->dw_attr_val.val_entry != NULL)
4439 return a->dw_attr_val.val_entry->index;
4440 return NOT_INDEXED;
4441 }
4442
4443 /* Add a flag value attribute to a DIE. */
4444
4445 static inline void
4446 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4447 {
4448 dw_attr_node attr;
4449
4450 attr.dw_attr = attr_kind;
4451 attr.dw_attr_val.val_class = dw_val_class_flag;
4452 attr.dw_attr_val.val_entry = NULL;
4453 attr.dw_attr_val.v.val_flag = flag;
4454 add_dwarf_attr (die, &attr);
4455 }
4456
4457 static inline unsigned
4458 AT_flag (dw_attr_node *a)
4459 {
4460 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4461 return a->dw_attr_val.v.val_flag;
4462 }
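/* Illustrative usage of the add/read pair above (the DIE is hypothetical;
   the pattern itself appears throughout this file): mark a DIE as
   describing an externally visible entity and read the flag back.  */
#if 0
add_AT_flag (some_die, DW_AT_external, 1);
dw_attr_node *a = get_AT (some_die, DW_AT_external);
if (a && AT_flag (a))
  {
    /* ... the DIE describes an external symbol ...  */
  }
#endif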
4463
4464 /* Add a signed integer attribute value to a DIE. */
4465
4466 static inline void
4467 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4468 {
4469 dw_attr_node attr;
4470
4471 attr.dw_attr = attr_kind;
4472 attr.dw_attr_val.val_class = dw_val_class_const;
4473 attr.dw_attr_val.val_entry = NULL;
4474 attr.dw_attr_val.v.val_int = int_val;
4475 add_dwarf_attr (die, &attr);
4476 }
4477
4478 static inline HOST_WIDE_INT
4479 AT_int (dw_attr_node *a)
4480 {
4481 gcc_assert (a && (AT_class (a) == dw_val_class_const
4482 || AT_class (a) == dw_val_class_const_implicit));
4483 return a->dw_attr_val.v.val_int;
4484 }
4485
4486 /* Add an unsigned integer attribute value to a DIE. */
4487
4488 static inline void
4489 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4490 unsigned HOST_WIDE_INT unsigned_val)
4491 {
4492 dw_attr_node attr;
4493
4494 attr.dw_attr = attr_kind;
4495 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4496 attr.dw_attr_val.val_entry = NULL;
4497 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4498 add_dwarf_attr (die, &attr);
4499 }
4500
4501 static inline unsigned HOST_WIDE_INT
4502 AT_unsigned (dw_attr_node *a)
4503 {
4504 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4505 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4506 return a->dw_attr_val.v.val_unsigned;
4507 }
4508
4509 /* Add an unsigned wide integer attribute value to a DIE. */
4510
4511 static inline void
4512 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4513 const wide_int& w)
4514 {
4515 dw_attr_node attr;
4516
4517 attr.dw_attr = attr_kind;
4518 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4519 attr.dw_attr_val.val_entry = NULL;
4520 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4521 *attr.dw_attr_val.v.val_wide = w;
4522 add_dwarf_attr (die, &attr);
4523 }
4524
4525 /* Add an unsigned double integer attribute value to a DIE. */
4526
4527 static inline void
4528 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4529 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4530 {
4531 dw_attr_node attr;
4532
4533 attr.dw_attr = attr_kind;
4534 attr.dw_attr_val.val_class = dw_val_class_const_double;
4535 attr.dw_attr_val.val_entry = NULL;
4536 attr.dw_attr_val.v.val_double.high = high;
4537 attr.dw_attr_val.v.val_double.low = low;
4538 add_dwarf_attr (die, &attr);
4539 }
4540
4541 /* Add a vector-of-bytes attribute value (e.g. a floating-point constant) to a DIE.  */
4542
4543 static inline void
4544 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4545 unsigned int length, unsigned int elt_size, unsigned char *array)
4546 {
4547 dw_attr_node attr;
4548
4549 attr.dw_attr = attr_kind;
4550 attr.dw_attr_val.val_class = dw_val_class_vec;
4551 attr.dw_attr_val.val_entry = NULL;
4552 attr.dw_attr_val.v.val_vec.length = length;
4553 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4554 attr.dw_attr_val.v.val_vec.array = array;
4555 add_dwarf_attr (die, &attr);
4556 }
4557
4558 /* Add an 8-byte data attribute value to a DIE. */
4559
4560 static inline void
4561 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4562 unsigned char data8[8])
4563 {
4564 dw_attr_node attr;
4565
4566 attr.dw_attr = attr_kind;
4567 attr.dw_attr_val.val_class = dw_val_class_data8;
4568 attr.dw_attr_val.val_entry = NULL;
4569 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4570 add_dwarf_attr (die, &attr);
4571 }
4572
4573 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4574 dwarf_split_debug_info, address attributes in dies destined for the
4575 final executable have force_direct set to avoid using indexed
4576 references. */
4577
4578 static inline void
4579 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4580 bool force_direct)
4581 {
4582 dw_attr_node attr;
4583 char * lbl_id;
4584
4585 lbl_id = xstrdup (lbl_low);
4586 attr.dw_attr = DW_AT_low_pc;
4587 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4588 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4589 if (dwarf_split_debug_info && !force_direct)
4590 attr.dw_attr_val.val_entry
4591 = add_addr_table_entry (lbl_id, ate_kind_label);
4592 else
4593 attr.dw_attr_val.val_entry = NULL;
4594 add_dwarf_attr (die, &attr);
4595
4596 attr.dw_attr = DW_AT_high_pc;
4597 if (dwarf_version < 4)
4598 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4599 else
4600 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4601 lbl_id = xstrdup (lbl_high);
4602 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4603 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4604 && dwarf_split_debug_info && !force_direct)
4605 attr.dw_attr_val.val_entry
4606 = add_addr_table_entry (lbl_id, ate_kind_label);
4607 else
4608 attr.dw_attr_val.val_entry = NULL;
4609 add_dwarf_attr (die, &attr);
4610 }
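/* An illustrative call (the DIE and labels are hypothetical): describe the
   code range of a function whose begin/end labels have already been
   emitted.  With -gsplit-dwarf and force_direct false, the low-pc label is
   routed through the .debug_addr table rather than referenced directly.  */
#if 0
add_AT_low_high_pc (subprogram_die, "LFB42", "LFE42", false);
#endif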
4611
4612 /* Hash and equality functions for debug_str_hash. */
4613
4614 hashval_t
4615 indirect_string_hasher::hash (indirect_string_node *x)
4616 {
4617 return htab_hash_string (x->str);
4618 }
4619
4620 bool
4621 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4622 {
4623 return strcmp (x1->str, x2) == 0;
4624 }
4625
4626 /* Add STR to the given string hash TABLE, creating a node if needed and bumping its reference count.  */
4627
4628 static struct indirect_string_node *
4629 find_AT_string_in_table (const char *str,
4630 hash_table<indirect_string_hasher> *table,
4631 enum insert_option insert = INSERT)
4632 {
4633 struct indirect_string_node *node;
4634
4635 indirect_string_node **slot
4636 = table->find_slot_with_hash (str, htab_hash_string (str), insert);
4637 if (*slot == NULL)
4638 {
4639 node = ggc_cleared_alloc<indirect_string_node> ();
4640 node->str = ggc_strdup (str);
4641 *slot = node;
4642 }
4643 else
4644 node = *slot;
4645
4646 node->refcount++;
4647 return node;
4648 }
4649
4650 /* Add STR to the indirect string hash table. */
4651
4652 static struct indirect_string_node *
4653 find_AT_string (const char *str, enum insert_option insert = INSERT)
4654 {
4655 if (! debug_str_hash)
4656 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4657
4658 return find_AT_string_in_table (str, debug_str_hash, insert);
4659 }
4660
4661 /* Add a string attribute value to a DIE. */
4662
4663 static inline void
4664 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4665 {
4666 dw_attr_node attr;
4667 struct indirect_string_node *node;
4668
4669 node = find_AT_string (str);
4670
4671 attr.dw_attr = attr_kind;
4672 attr.dw_attr_val.val_class = dw_val_class_str;
4673 attr.dw_attr_val.val_entry = NULL;
4674 attr.dw_attr_val.v.val_str = node;
4675 add_dwarf_attr (die, &attr);
4676 }
4677
4678 static inline const char *
4679 AT_string (dw_attr_node *a)
4680 {
4681 gcc_assert (a && AT_class (a) == dw_val_class_str);
4682 return a->dw_attr_val.v.val_str->str;
4683 }
4684
4685 /* Call this function directly to bypass AT_string_form's logic of putting
4686 the string inline in the DIE.  */
4687
4688 static void
4689 set_indirect_string (struct indirect_string_node *node)
4690 {
4691 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4692 /* If the string is already indirect, this is a no-op.  */
4693 if (node->form == DW_FORM_strp
4694 || node->form == DW_FORM_line_strp
4695 || node->form == dwarf_FORM (DW_FORM_strx))
4696 {
4697 gcc_assert (node->label);
4698 return;
4699 }
4700 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4701 ++dw2_string_counter;
4702 node->label = xstrdup (label);
4703
4704 if (!dwarf_split_debug_info)
4705 {
4706 node->form = DW_FORM_strp;
4707 node->index = NOT_INDEXED;
4708 }
4709 else
4710 {
4711 node->form = dwarf_FORM (DW_FORM_strx);
4712 node->index = NO_INDEX_ASSIGNED;
4713 }
4714 }
4715
4716 /* A helper function for dwarf2out_finish, called to reset indirect
4717 string decisions done for early LTO dwarf output before fat object
4718 dwarf output. */
4719
4720 int
4721 reset_indirect_string (indirect_string_node **h, void *)
4722 {
4723 struct indirect_string_node *node = *h;
4724 if (node->form == DW_FORM_strp || node->form == dwarf_FORM (DW_FORM_strx))
4725 {
4726 free (node->label);
4727 node->label = NULL;
4728 node->form = (dwarf_form) 0;
4729 node->index = 0;
4730 }
4731 return 1;
4732 }
4733
4734 /* Find out whether a string should be output inline in the DIE
4735 or out-of-line in the .debug_str section.  */
4736
4737 static enum dwarf_form
4738 find_string_form (struct indirect_string_node *node)
4739 {
4740 unsigned int len;
4741
4742 if (node->form)
4743 return node->form;
4744
4745 len = strlen (node->str) + 1;
4746
4747 /* If the string is no longer than the size of the reference, it is
4748 always better to put it inline.  */
4749 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4750 return node->form = DW_FORM_string;
4751
4752 /* If we cannot expect the linker to merge strings in the .debug_str
4753 section, only put the string into .debug_str if doing so pays off
4754 even within this single module.  */
4755 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4756 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4757 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4758 return node->form = DW_FORM_string;
4759
4760 set_indirect_string (node);
4761
4762 return node->form;
4763 }
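/* A worked example of the heuristic above, assuming DWARF_OFFSET_SIZE is 4,
   indirect strings are supported, and the linker cannot merge strings: a
   20-byte string referenced once costs (20 - 4) * 1 = 16 <= 20, so it stays
   inline as DW_FORM_string; referenced twice it costs (20 - 4) * 2 = 32 > 20,
   so it is moved out of line and referenced indirectly.  */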
4764
4765 /* Find out whether the string referenced from the attribute should be
4766 output inline in the DIE or out-of-line in the .debug_str section.  */
4767
4768 static enum dwarf_form
4769 AT_string_form (dw_attr_node *a)
4770 {
4771 gcc_assert (a && AT_class (a) == dw_val_class_str);
4772 return find_string_form (a->dw_attr_val.v.val_str);
4773 }
4774
4775 /* Add a DIE reference attribute value to a DIE. */
4776
4777 static inline void
4778 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4779 {
4780 dw_attr_node attr;
4781 gcc_checking_assert (targ_die != NULL);
4782
4783 /* With LTO we can end up trying to reference something we didn't create
4784 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4785 if (targ_die == NULL)
4786 return;
4787
4788 attr.dw_attr = attr_kind;
4789 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4790 attr.dw_attr_val.val_entry = NULL;
4791 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4792 attr.dw_attr_val.v.val_die_ref.external = 0;
4793 add_dwarf_attr (die, &attr);
4794 }
4795
4796 /* Change DIE reference REF to point to NEW_DIE instead. */
4797
4798 static inline void
4799 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4800 {
4801 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4802 ref->dw_attr_val.v.val_die_ref.die = new_die;
4803 ref->dw_attr_val.v.val_die_ref.external = 0;
4804 }
4805
4806 /* Add an AT_specification attribute to a DIE, and also make the back
4807 pointer from the specification to the definition. */
4808
4809 static inline void
4810 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4811 {
4812 add_AT_die_ref (die, DW_AT_specification, targ_die);
4813 gcc_assert (!targ_die->die_definition);
4814 targ_die->die_definition = die;
4815 }
4816
4817 static inline dw_die_ref
4818 AT_ref (dw_attr_node *a)
4819 {
4820 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4821 return a->dw_attr_val.v.val_die_ref.die;
4822 }
4823
4824 static inline int
4825 AT_ref_external (dw_attr_node *a)
4826 {
4827 if (a && AT_class (a) == dw_val_class_die_ref)
4828 return a->dw_attr_val.v.val_die_ref.external;
4829
4830 return 0;
4831 }
4832
4833 static inline void
4834 set_AT_ref_external (dw_attr_node *a, int i)
4835 {
4836 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4837 a->dw_attr_val.v.val_die_ref.external = i;
4838 }
4839
4840 /* Add a location description attribute value to a DIE. */
4841
4842 static inline void
4843 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4844 {
4845 dw_attr_node attr;
4846
4847 attr.dw_attr = attr_kind;
4848 attr.dw_attr_val.val_class = dw_val_class_loc;
4849 attr.dw_attr_val.val_entry = NULL;
4850 attr.dw_attr_val.v.val_loc = loc;
4851 add_dwarf_attr (die, &attr);
4852 }
4853
4854 static inline dw_loc_descr_ref
4855 AT_loc (dw_attr_node *a)
4856 {
4857 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4858 return a->dw_attr_val.v.val_loc;
4859 }
4860
4861 static inline void
4862 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4863 {
4864 dw_attr_node attr;
4865
4866 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4867 return;
4868
4869 attr.dw_attr = attr_kind;
4870 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4871 attr.dw_attr_val.val_entry = NULL;
4872 attr.dw_attr_val.v.val_loc_list = loc_list;
4873 add_dwarf_attr (die, &attr);
4874 have_location_lists = true;
4875 }
4876
4877 static inline dw_loc_list_ref
4878 AT_loc_list (dw_attr_node *a)
4879 {
4880 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4881 return a->dw_attr_val.v.val_loc_list;
4882 }
4883
4884 /* Add a view list attribute to DIE. It must have a DW_AT_location
4885 attribute, because the view list complements the location list. */
4886
4887 static inline void
4888 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4889 {
4890 dw_attr_node attr;
4891
4892 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4893 return;
4894
4895 attr.dw_attr = attr_kind;
4896 attr.dw_attr_val.val_class = dw_val_class_view_list;
4897 attr.dw_attr_val.val_entry = NULL;
4898 attr.dw_attr_val.v.val_view_list = die;
4899 add_dwarf_attr (die, &attr);
4900 gcc_checking_assert (get_AT (die, DW_AT_location));
4901 gcc_assert (have_location_lists);
4902 }
4903
4904 /* Return a pointer to the location list referenced by the attribute.
4905 If the named attribute is a view list, look up the corresponding
4906 DW_AT_location attribute and return its location list. */
4907
4908 static inline dw_loc_list_ref *
4909 AT_loc_list_ptr (dw_attr_node *a)
4910 {
4911 gcc_assert (a);
4912 switch (AT_class (a))
4913 {
4914 case dw_val_class_loc_list:
4915 return &a->dw_attr_val.v.val_loc_list;
4916 case dw_val_class_view_list:
4917 {
4918 dw_attr_node *l;
4919 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4920 if (!l)
4921 return NULL;
4922 gcc_checking_assert (l + 1 == a);
4923 return AT_loc_list_ptr (l);
4924 }
4925 default:
4926 gcc_unreachable ();
4927 }
4928 }
4929
4930 /* Return the location attribute value associated with a view list
4931 attribute value. */
4932
4933 static inline dw_val_node *
4934 view_list_to_loc_list_val_node (dw_val_node *val)
4935 {
4936 gcc_assert (val->val_class == dw_val_class_view_list);
4937 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4938 if (!loc)
4939 return NULL;
4940 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4941 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4942 return &loc->dw_attr_val;
4943 }
4944
4945 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4946 {
4947 static hashval_t hash (addr_table_entry *);
4948 static bool equal (addr_table_entry *, addr_table_entry *);
4949 };
4950
4951 /* Table of entries into the .debug_addr section. */
4952
4953 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4954
4955 /* Hash an addr_table_entry.  */
4956
4957 hashval_t
4958 addr_hasher::hash (addr_table_entry *a)
4959 {
4960 inchash::hash hstate;
4961 switch (a->kind)
4962 {
4963 case ate_kind_rtx:
4964 hstate.add_int (0);
4965 break;
4966 case ate_kind_rtx_dtprel:
4967 hstate.add_int (1);
4968 break;
4969 case ate_kind_label:
4970 return htab_hash_string (a->addr.label);
4971 default:
4972 gcc_unreachable ();
4973 }
4974 inchash::add_rtx (a->addr.rtl, hstate);
4975 return hstate.end ();
4976 }
4977
4978 /* Determine equality for two addr_table_entries.  */
4979
4980 bool
4981 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4982 {
4983 if (a1->kind != a2->kind)
4984 return 0;
4985 switch (a1->kind)
4986 {
4987 case ate_kind_rtx:
4988 case ate_kind_rtx_dtprel:
4989 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4990 case ate_kind_label:
4991 return strcmp (a1->addr.label, a2->addr.label) == 0;
4992 default:
4993 gcc_unreachable ();
4994 }
4995 }
4996
4997 /* Initialize an addr_table_entry. */
4998
4999 void
5000 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
5001 {
5002 e->kind = kind;
5003 switch (kind)
5004 {
5005 case ate_kind_rtx:
5006 case ate_kind_rtx_dtprel:
5007 e->addr.rtl = (rtx) addr;
5008 break;
5009 case ate_kind_label:
5010 e->addr.label = (char *) addr;
5011 break;
5012 }
5013 e->refcount = 0;
5014 e->index = NO_INDEX_ASSIGNED;
5015 }
5016
5017 /* Look up or create an address table entry for ADDR of the given KIND and
5018 bump its reference count.  Defer setting an index until output time.  */
5019
5020 static addr_table_entry *
5021 add_addr_table_entry (void *addr, enum ate_kind kind)
5022 {
5023 addr_table_entry *node;
5024 addr_table_entry finder;
5025
5026 gcc_assert (dwarf_split_debug_info);
5027 if (! addr_index_table)
5028 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5029 init_addr_table_entry (&finder, kind, addr);
5030 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5031
5032 if (*slot == HTAB_EMPTY_ENTRY)
5033 {
5034 node = ggc_cleared_alloc<addr_table_entry> ();
5035 init_addr_table_entry (node, kind, addr);
5036 *slot = node;
5037 }
5038 else
5039 node = *slot;
5040
5041 node->refcount++;
5042 return node;
5043 }
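/* Illustrative sketch (split DWARF only; "sym" is a hypothetical rtx not
   already in the table): adding the same address twice returns the same
   entry with its refcount bumped, so duplicate addresses share one
   .debug_addr slot.  */
#if 0
addr_table_entry *e1 = add_addr_table_entry (sym, ate_kind_rtx);
addr_table_entry *e2 = add_addr_table_entry (sym, ate_kind_rtx);
gcc_assert (e1 == e2 && e1->refcount == 2);
#endif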
5044
5045 /* Remove an entry from the addr table by decrementing its refcount.
5046 Strictly, decrementing the refcount would be enough, but the
5047 assertion that the entry is actually in the table has found
5048 bugs. */
5049
5050 static void
5051 remove_addr_table_entry (addr_table_entry *entry)
5052 {
5053 gcc_assert (dwarf_split_debug_info && addr_index_table);
5054 /* After an index is assigned, the table is frozen. */
5055 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5056 entry->refcount--;
5057 }
5058
5059 /* Given a location list, remove all addresses it refers to from the
5060 address_table. */
5061
5062 static void
5063 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5064 {
5065 for (; descr; descr = descr->dw_loc_next)
5066 if (descr->dw_loc_oprnd1.val_entry != NULL)
5067 {
5068 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5069 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5070 }
5071 }
5072
5073 /* A helper function for dwarf2out_finish called through
5074 htab_traverse. Assign an addr_table_entry its index. All entries
5075 must be collected into the table when this function is called,
5076 because the indexing code relies on htab_traverse to traverse nodes
5077 in the same order for each run. */
5078
5079 int
5080 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5081 {
5082 addr_table_entry *node = *h;
5083
5084 /* Don't index unreferenced nodes. */
5085 if (node->refcount == 0)
5086 return 1;
5087
5088 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5089 node->index = *index;
5090 *index += 1;
5091
5092 return 1;
5093 }
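
/* Illustrative sketch (not part of the original source): how the two-phase
   .debug_addr protocol above is typically driven.  Entries accumulate with
   reference counts while DIEs are built; only at output time does a single
   traversal hand out consecutive indices, skipping unreferenced entries.
   The call below assumes hash_table's traverse_noresize interface, whose
   callback signature index_addr_table_entry matches.  */
#if 0
static void
example_assign_addr_indices (void)
{
  unsigned int index = 0;
  if (addr_index_table != NULL)
    addr_index_table->traverse_noresize
      <unsigned int *, index_addr_table_entry> (&index);
  /* From this point on the table is frozen: remove_addr_table_entry
     asserts that the entry it is given has no index assigned yet.  */
}
#endif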
5094
5095 /* Add an address constant attribute value to a DIE. When using
5096 dwarf_split_debug_info, address attributes in dies destined for the
5097 final executable should be direct references--setting the parameter
5098 force_direct ensures this behavior. */
5099
5100 static inline void
5101 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5102 bool force_direct)
5103 {
5104 dw_attr_node attr;
5105
5106 attr.dw_attr = attr_kind;
5107 attr.dw_attr_val.val_class = dw_val_class_addr;
5108 attr.dw_attr_val.v.val_addr = addr;
5109 if (dwarf_split_debug_info && !force_direct)
5110 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5111 else
5112 attr.dw_attr_val.val_entry = NULL;
5113 add_dwarf_attr (die, &attr);
5114 }
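
/* Illustrative sketch (not part of the original source): the effect of
   FORCE_DIRECT.  With -gsplit-dwarf (dwarf_split_debug_info) an address
   attribute is normally routed through the .debug_addr table and the DIE
   records only a table entry; forcing a direct reference keeps the
   relocation in the DIE itself, which attributes destined for the final
   executable need.  The DIE, attribute and RTX below are placeholders.  */
#if 0
static void
example_add_address_attribute (dw_die_ref some_die,
                               enum dwarf_attribute attr_kind,
                               rtx some_symbol_ref)
{
  /* Indexed via .debug_addr when split debug info is enabled.  */
  add_AT_addr (some_die, attr_kind, some_symbol_ref, /*force_direct=*/false);
  /* Always emitted as a direct (relocated) address.  */
  add_AT_addr (some_die, attr_kind, some_symbol_ref, /*force_direct=*/true);
}
#endif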
5115
5116 /* Get the RTX from an address DIE attribute. */
5117
5118 static inline rtx
5119 AT_addr (dw_attr_node *a)
5120 {
5121 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5122 return a->dw_attr_val.v.val_addr;
5123 }
5124
5125 /* Add a file attribute value to a DIE. */
5126
5127 static inline void
5128 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5129 struct dwarf_file_data *fd)
5130 {
5131 dw_attr_node attr;
5132
5133 attr.dw_attr = attr_kind;
5134 attr.dw_attr_val.val_class = dw_val_class_file;
5135 attr.dw_attr_val.val_entry = NULL;
5136 attr.dw_attr_val.v.val_file = fd;
5137 add_dwarf_attr (die, &attr);
5138 }
5139
5140 /* Get the dwarf_file_data from a file DIE attribute. */
5141
5142 static inline struct dwarf_file_data *
5143 AT_file (dw_attr_node *a)
5144 {
5145 gcc_assert (a && (AT_class (a) == dw_val_class_file
5146 || AT_class (a) == dw_val_class_file_implicit));
5147 return a->dw_attr_val.v.val_file;
5148 }
5149
5150 /* Add a VMS delta attribute value to a DIE. */
5151
5152 static inline void
5153 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
5154 const char *lbl1, const char *lbl2)
5155 {
5156 dw_attr_node attr;
5157
5158 attr.dw_attr = attr_kind;
5159 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
5160 attr.dw_attr_val.val_entry = NULL;
5161 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
5162 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
5163 add_dwarf_attr (die, &attr);
5164 }
5165
5166 /* Add a symbolic view identifier attribute value to a DIE. */
5167
5168 static inline void
5169 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5170 const char *view_label)
5171 {
5172 dw_attr_node attr;
5173
5174 attr.dw_attr = attr_kind;
5175 attr.dw_attr_val.val_class = dw_val_class_symview;
5176 attr.dw_attr_val.val_entry = NULL;
5177 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5178 add_dwarf_attr (die, &attr);
5179 }
5180
5181 /* Add a label identifier attribute value to a DIE. */
5182
5183 static inline void
5184 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5185 const char *lbl_id)
5186 {
5187 dw_attr_node attr;
5188
5189 attr.dw_attr = attr_kind;
5190 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5191 attr.dw_attr_val.val_entry = NULL;
5192 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5193 if (dwarf_split_debug_info)
5194 attr.dw_attr_val.val_entry
5195 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5196 ate_kind_label);
5197 add_dwarf_attr (die, &attr);
5198 }
5199
5200 /* Add a section offset attribute value to a DIE, an offset into the
5201 debug_line section. */
5202
5203 static inline void
5204 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5205 const char *label)
5206 {
5207 dw_attr_node attr;
5208
5209 attr.dw_attr = attr_kind;
5210 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5211 attr.dw_attr_val.val_entry = NULL;
5212 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5213 add_dwarf_attr (die, &attr);
5214 }
5215
5216 /* Add a section offset attribute value to a DIE, an offset into the
5217 debug_macinfo section. */
5218
5219 static inline void
5220 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5221 const char *label)
5222 {
5223 dw_attr_node attr;
5224
5225 attr.dw_attr = attr_kind;
5226 attr.dw_attr_val.val_class = dw_val_class_macptr;
5227 attr.dw_attr_val.val_entry = NULL;
5228 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5229 add_dwarf_attr (die, &attr);
5230 }
5231
5232 /* Add a range_list attribute value to a DIE. When using
5233 dwarf_split_debug_info, address attributes in dies destined for the
5234 final executable should be direct references--setting the parameter
5235 force_direct ensures this behavior. */
5236
5237 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5238 #define RELOCATED_OFFSET (NULL)
5239
5240 static void
5241 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5242 long unsigned int offset, bool force_direct)
5243 {
5244 dw_attr_node attr;
5245
5246 attr.dw_attr = attr_kind;
5247 attr.dw_attr_val.val_class = dw_val_class_range_list;
5248 /* For the range_list attribute, use val_entry to store whether the
5249 offset should follow split-debug-info or normal semantics. This
5250 value is read in output_range_list_offset. */
5251 if (dwarf_split_debug_info && !force_direct)
5252 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5253 else
5254 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5255 attr.dw_attr_val.v.val_offset = offset;
5256 add_dwarf_attr (die, &attr);
5257 }
5258
5259 /* Return the start label of a delta attribute. */
5260
5261 static inline const char *
5262 AT_vms_delta1 (dw_attr_node *a)
5263 {
5264 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5265 return a->dw_attr_val.v.val_vms_delta.lbl1;
5266 }
5267
5268 /* Return the end label of a delta attribute. */
5269
5270 static inline const char *
5271 AT_vms_delta2 (dw_attr_node *a)
5272 {
5273 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5274 return a->dw_attr_val.v.val_vms_delta.lbl2;
5275 }
5276
5277 static inline const char *
5278 AT_lbl (dw_attr_node *a)
5279 {
5280 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5281 || AT_class (a) == dw_val_class_lineptr
5282 || AT_class (a) == dw_val_class_macptr
5283 || AT_class (a) == dw_val_class_loclistsptr
5284 || AT_class (a) == dw_val_class_high_pc));
5285 return a->dw_attr_val.v.val_lbl_id;
5286 }
5287
5288 /* Get the attribute of kind ATTR_KIND from DIE, following any DW_AT_specification or DW_AT_abstract_origin reference if needed. */
5289
5290 static dw_attr_node *
5291 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5292 {
5293 dw_attr_node *a;
5294 unsigned ix;
5295 dw_die_ref spec = NULL;
5296
5297 if (! die)
5298 return NULL;
5299
5300 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5301 if (a->dw_attr == attr_kind)
5302 return a;
5303 else if (a->dw_attr == DW_AT_specification
5304 || a->dw_attr == DW_AT_abstract_origin)
5305 spec = AT_ref (a);
5306
5307 if (spec)
5308 return get_AT (spec, attr_kind);
5309
5310 return NULL;
5311 }
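
/* Illustrative sketch (not part of the original source): why get_AT chases
   DW_AT_specification and DW_AT_abstract_origin.  A definition DIE often
   carries only a link back to its declaration (or abstract instance), so a
   query for, say, DW_AT_name is expected to succeed by following that link.
   The DIE name below is a placeholder.  */
#if 0
static const char *
example_lookup_name (dw_die_ref definition_die)
{
  /* Falls back to the DIE named by DW_AT_specification or
     DW_AT_abstract_origin when the attribute is absent here.  */
  dw_attr_node *a = get_AT (definition_die, DW_AT_name);
  return a ? AT_string (a) : NULL;
}
#endif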
5312
5313 /* Returns the parent of the declaration of DIE. */
5314
5315 static dw_die_ref
5316 get_die_parent (dw_die_ref die)
5317 {
5318 dw_die_ref t;
5319
5320 if (!die)
5321 return NULL;
5322
5323 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5324 || (t = get_AT_ref (die, DW_AT_specification)))
5325 die = t;
5326
5327 return die->die_parent;
5328 }
5329
5330 /* Return the "low pc" attribute value, typically associated with a subprogram
5331 DIE. Return NULL if the "low pc" attribute is either not present or
5332 cannot be represented as an assembler label identifier. */
5333
5334 static inline const char *
5335 get_AT_low_pc (dw_die_ref die)
5336 {
5337 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5338
5339 return a ? AT_lbl (a) : NULL;
5340 }
5341
5342 /* Return the value of the string attribute designated by ATTR_KIND, or
5343 NULL if it is not present. */
5344
5345 static inline const char *
5346 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5347 {
5348 dw_attr_node *a = get_AT (die, attr_kind);
5349
5350 return a ? AT_string (a) : NULL;
5351 }
5352
5353 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5354 if it is not present. */
5355
5356 static inline int
5357 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5358 {
5359 dw_attr_node *a = get_AT (die, attr_kind);
5360
5361 return a ? AT_flag (a) : 0;
5362 }
5363
5364 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5365 if it is not present. */
5366
5367 static inline unsigned
5368 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5369 {
5370 dw_attr_node *a = get_AT (die, attr_kind);
5371
5372 return a ? AT_unsigned (a) : 0;
5373 }
5374
5375 static inline dw_die_ref
5376 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5377 {
5378 dw_attr_node *a = get_AT (die, attr_kind);
5379
5380 return a ? AT_ref (a) : NULL;
5381 }
5382
5383 static inline struct dwarf_file_data *
5384 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5385 {
5386 dw_attr_node *a = get_AT (die, attr_kind);
5387
5388 return a ? AT_file (a) : NULL;
5389 }
5390
5391 /* Return TRUE if the language is C. */
5392
5393 static inline bool
5394 is_c (void)
5395 {
5396 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5397
5398 return (lang == DW_LANG_C || lang == DW_LANG_C89 || lang == DW_LANG_C99
5399 || lang == DW_LANG_C11 || lang == DW_LANG_ObjC);
5402 }
5403
5404 /* Return TRUE if the language is C++. */
5405
5406 static inline bool
5407 is_cxx (void)
5408 {
5409 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5410
5411 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5412 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5413 }
5414
5415 /* Return TRUE if DECL was created by the C++ frontend. */
5416
5417 static bool
5418 is_cxx (const_tree decl)
5419 {
5420 if (in_lto_p)
5421 {
5422 const_tree context = get_ultimate_context (decl);
5423 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5424 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5425 }
5426 return is_cxx ();
5427 }
5428
5429 /* Return TRUE if the language is Fortran. */
5430
5431 static inline bool
5432 is_fortran (void)
5433 {
5434 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5435
5436 return (lang == DW_LANG_Fortran77
5437 || lang == DW_LANG_Fortran90
5438 || lang == DW_LANG_Fortran95
5439 || lang == DW_LANG_Fortran03
5440 || lang == DW_LANG_Fortran08);
5441 }
5442
5443 static inline bool
5444 is_fortran (const_tree decl)
5445 {
5446 if (in_lto_p)
5447 {
5448 const_tree context = get_ultimate_context (decl);
5449 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5450 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5451 "GNU Fortran", 11) == 0
5452 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5453 "GNU F77") == 0);
5454 }
5455 return is_fortran ();
5456 }
5457
5458 /* Return TRUE if the language is Ada. */
5459
5460 static inline bool
5461 is_ada (void)
5462 {
5463 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5464
5465 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5466 }
5467
5468 /* Return TRUE if the language is D. */
5469
5470 static inline bool
5471 is_dlang (void)
5472 {
5473 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5474
5475 return lang == DW_LANG_D;
5476 }
5477
5478 /* Remove the specified attribute if present. Return TRUE if removal
5479 was successful. */
5480
5481 static bool
5482 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5483 {
5484 dw_attr_node *a;
5485 unsigned ix;
5486
5487 if (! die)
5488 return false;
5489
5490 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5491 if (a->dw_attr == attr_kind)
5492 {
5493 if (AT_class (a) == dw_val_class_str)
5494 if (a->dw_attr_val.v.val_str->refcount)
5495 a->dw_attr_val.v.val_str->refcount--;
5496
5497 /* vec::ordered_remove should help reduce the number of abbrevs
5498 that are needed. */
5499 die->die_attr->ordered_remove (ix);
5500 return true;
5501 }
5502 return false;
5503 }
5504
5505 /* Remove CHILD from its parent. PREV must have the property that
5506 PREV->DIE_SIB == CHILD. Clears CHILD's die_sib link but leaves its die_parent alone. */
5507
5508 static void
5509 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5510 {
5511 gcc_assert (child->die_parent == prev->die_parent);
5512 gcc_assert (prev->die_sib == child);
5513 if (prev == child)
5514 {
5515 gcc_assert (child->die_parent->die_child == child);
5516 prev = NULL;
5517 }
5518 else
5519 prev->die_sib = child->die_sib;
5520 if (child->die_parent->die_child == child)
5521 child->die_parent->die_child = prev;
5522 child->die_sib = NULL;
5523 }
5524
5525 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5526 PREV->DIE_SIB == OLD_CHILD. Clears OLD_CHILD's die_sib link but leaves its die_parent alone. */
5527
5528 static void
5529 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5530 {
5531 dw_die_ref parent = old_child->die_parent;
5532
5533 gcc_assert (parent == prev->die_parent);
5534 gcc_assert (prev->die_sib == old_child);
5535
5536 new_child->die_parent = parent;
5537 if (prev == old_child)
5538 {
5539 gcc_assert (parent->die_child == old_child);
5540 new_child->die_sib = new_child;
5541 }
5542 else
5543 {
5544 prev->die_sib = new_child;
5545 new_child->die_sib = old_child->die_sib;
5546 }
5547 if (old_child->die_parent->die_child == old_child)
5548 old_child->die_parent->die_child = new_child;
5549 old_child->die_sib = NULL;
5550 }
5551
5552 /* Move all children from OLD_PARENT to NEW_PARENT. */
5553
5554 static void
5555 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5556 {
5557 dw_die_ref c;
5558 new_parent->die_child = old_parent->die_child;
5559 old_parent->die_child = NULL;
5560 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5561 }
5562
5563 /* Remove all children of DIE whose die_tag is TAG. Do nothing if no
5564 child matches TAG. */
5565
5566 static void
5567 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5568 {
5569 dw_die_ref c;
5570
5571 c = die->die_child;
5572 if (c) do {
5573 dw_die_ref prev = c;
5574 c = c->die_sib;
5575 while (c->die_tag == tag)
5576 {
5577 remove_child_with_prev (c, prev);
5578 c->die_parent = NULL;
5579 /* Might have removed every child. */
5580 if (die->die_child == NULL)
5581 return;
5582 c = prev->die_sib;
5583 }
5584 } while (c != die->die_child);
5585 }
5586
5587 /* Add a CHILD_DIE as the last child of DIE. */
5588
5589 static void
5590 add_child_die (dw_die_ref die, dw_die_ref child_die)
5591 {
5592 /* FIXME this should probably be an assert. */
5593 if (! die || ! child_die)
5594 return;
5595 gcc_assert (die != child_die);
5596
5597 child_die->die_parent = die;
5598 if (die->die_child)
5599 {
5600 child_die->die_sib = die->die_child->die_sib;
5601 die->die_child->die_sib = child_die;
5602 }
5603 else
5604 child_die->die_sib = child_die;
5605 die->die_child = child_die;
5606 }
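
/* Illustrative sketch (not part of the original source): the sibling-list
   invariant the routines above maintain.  DIE->die_child points to the
   *last* child, and the children are linked into a circular singly linked
   list through die_sib, so die_child->die_sib is the first child.  A
   manual walk therefore looks like this (FOR_EACH_CHILD wraps the same
   pattern).  */
#if 0
static unsigned int
example_count_children (dw_die_ref die)
{
  unsigned int n = 0;
  dw_die_ref c = die->die_child;
  if (c)
    do
      {
        c = c->die_sib;  /* The first iteration lands on the first child.  */
        n++;
      }
    while (c != die->die_child);
  return n;
}
#endif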
5607
5608 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5609
5610 static void
5611 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5612 dw_die_ref after_die)
5613 {
5614 gcc_assert (die
5615 && child_die
5616 && after_die
5617 && die->die_child
5618 && die != child_die);
5619
5620 child_die->die_parent = die;
5621 child_die->die_sib = after_die->die_sib;
5622 after_die->die_sib = child_die;
5623 if (die->die_child == after_die)
5624 die->die_child = child_die;
5625 }
5626
5627 /* Unassociate CHILD from its parent, and make its parent be
5628 NEW_PARENT. */
5629
5630 static void
5631 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5632 {
5633 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5634 if (p->die_sib == child)
5635 {
5636 remove_child_with_prev (child, p);
5637 break;
5638 }
5639 add_child_die (new_parent, child);
5640 }
5641
5642 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5643 is the specification, to the end of PARENT's list of children.
5644 This is done by removing and re-adding it. */
5645
5646 static void
5647 splice_child_die (dw_die_ref parent, dw_die_ref child)
5648 {
5649 /* We want the declaration DIE from inside the class, not the
5650 specification DIE at toplevel. */
5651 if (child->die_parent != parent)
5652 {
5653 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5654
5655 if (tmp)
5656 child = tmp;
5657 }
5658
5659 gcc_assert (child->die_parent == parent
5660 || (child->die_parent
5661 == get_AT_ref (parent, DW_AT_specification)));
5662
5663 reparent_child (child, parent);
5664 }
5665
5666 /* Create and return a new die with TAG_VALUE as tag. */
5667
5668 static inline dw_die_ref
5669 new_die_raw (enum dwarf_tag tag_value)
5670 {
5671 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5672 die->die_tag = tag_value;
5673 return die;
5674 }
5675
5676 /* Create and return a new die with a parent of PARENT_DIE. If
5677 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5678 associated tree T must be supplied to determine parenthood
5679 later. */
5680
5681 static inline dw_die_ref
5682 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5683 {
5684 dw_die_ref die = new_die_raw (tag_value);
5685
5686 if (parent_die != NULL)
5687 add_child_die (parent_die, die);
5688 else
5689 {
5690 limbo_die_node *limbo_node;
5691
5692 /* No DIEs created after early dwarf should end up in limbo,
5693 because the limbo list should not persist past LTO
5694 streaming. */
5695 if (tag_value != DW_TAG_compile_unit
5696 /* These are allowed because they're generated while
5697 breaking out COMDAT units late. */
5698 && tag_value != DW_TAG_type_unit
5699 && tag_value != DW_TAG_skeleton_unit
5700 && !early_dwarf
5701 /* Allow nested functions to live in limbo because they will
5702 only temporarily live there, as decls_for_scope will fix
5703 them up. */
5704 && (TREE_CODE (t) != FUNCTION_DECL
5705 || !decl_function_context (t))
5706 /* Same as nested functions above but for types. Types that
5707 are local to a function will be fixed in
5708 decls_for_scope. */
5709 && (!RECORD_OR_UNION_TYPE_P (t)
5710 || !TYPE_CONTEXT (t)
5711 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5712 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5713 especially in the ltrans stage, but once we implement LTO
5714 dwarf streaming, we should remove this exception. */
5715 && !in_lto_p)
5716 {
5717 fprintf (stderr, "symbol ended up in limbo too late:");
5718 debug_generic_stmt (t);
5719 gcc_unreachable ();
5720 }
5721
5722 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5723 limbo_node->die = die;
5724 limbo_node->created_for = t;
5725 limbo_node->next = limbo_die_list;
5726 limbo_die_list = limbo_node;
5727 }
5728
5729 return die;
5730 }
5731
5732 /* Return the DIE associated with the given type specifier. */
5733
5734 static inline dw_die_ref
5735 lookup_type_die (tree type)
5736 {
5737 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5738 if (die && die->removed)
5739 {
5740 TYPE_SYMTAB_DIE (type) = NULL;
5741 return NULL;
5742 }
5743 return die;
5744 }
5745
5746 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5747 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5748 anonymous type instead of the DIE of the naming typedef. */
5749
5750 static inline dw_die_ref
5751 strip_naming_typedef (tree type, dw_die_ref type_die)
5752 {
5753 if (type
5754 && TREE_CODE (type) == RECORD_TYPE
5755 && type_die
5756 && type_die->die_tag == DW_TAG_typedef
5757 && is_naming_typedef_decl (TYPE_NAME (type)))
5758 type_die = get_AT_ref (type_die, DW_AT_type);
5759 return type_die;
5760 }
5761
5762 /* Like lookup_type_die, but if type is an anonymous type named by a
5763 typedef[1], return the DIE of the anonymous type instead of the one of
5764 the naming typedef. This is because in gen_typedef_die we equated
5765 the anonymous struct named by the typedef with the DIE of
5766 the naming typedef. So by default, lookup_type_die on an anonymous
5767 struct yields the DIE of the naming typedef.
5768
5769 [1]: Read the comment of is_naming_typedef_decl to learn about what
5770 a naming typedef is. */
5771
5772 static inline dw_die_ref
5773 lookup_type_die_strip_naming_typedef (tree type)
5774 {
5775 dw_die_ref die = lookup_type_die (type);
5776 return strip_naming_typedef (type, die);
5777 }
5778
5779 /* Equate a DIE to a given type specifier. */
5780
5781 static inline void
5782 equate_type_number_to_die (tree type, dw_die_ref type_die)
5783 {
5784 TYPE_SYMTAB_DIE (type) = type_die;
5785 }
5786
5787 static dw_die_ref maybe_create_die_with_external_ref (tree);
5788 struct GTY(()) sym_off_pair
5789 {
5790 const char * GTY((skip)) sym;
5791 unsigned HOST_WIDE_INT off;
5792 };
5793 static GTY(()) hash_map<tree, sym_off_pair> *external_die_map;
5794
5795 /* Returns a hash value for X (which really is a die_struct). */
5796
5797 inline hashval_t
5798 decl_die_hasher::hash (die_node *x)
5799 {
5800 return (hashval_t) x->decl_id;
5801 }
5802
5803 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5804
5805 inline bool
5806 decl_die_hasher::equal (die_node *x, tree y)
5807 {
5808 return (x->decl_id == DECL_UID (y));
5809 }
5810
5811 /* Return the DIE associated with a given declaration. */
5812
5813 static inline dw_die_ref
5814 lookup_decl_die (tree decl)
5815 {
5816 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5817 NO_INSERT);
5818 if (!die)
5819 {
5820 if (in_lto_p)
5821 return maybe_create_die_with_external_ref (decl);
5822 return NULL;
5823 }
5824 if ((*die)->removed)
5825 {
5826 decl_die_table->clear_slot (die);
5827 return NULL;
5828 }
5829 return *die;
5830 }
5831
5832
5833 /* Return the DIE associated with BLOCK. */
5834
5835 static inline dw_die_ref
5836 lookup_block_die (tree block)
5837 {
5838 dw_die_ref die = BLOCK_DIE (block);
5839 if (!die && in_lto_p)
5840 return maybe_create_die_with_external_ref (block);
5841 return die;
5842 }
5843
5844 /* Associate DIE with BLOCK. */
5845
5846 static inline void
5847 equate_block_to_die (tree block, dw_die_ref die)
5848 {
5849 BLOCK_DIE (block) = die;
5850 }
5851 #undef BLOCK_DIE
5852
5853
5854 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5855 style reference. Return true if we found one referring to a DIE for
5856 DECL, otherwise return false. */
5857
5858 static bool
5859 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5860 unsigned HOST_WIDE_INT *off)
5861 {
5862 dw_die_ref die;
5863
5864 if (in_lto_p)
5865 {
5866 /* During WPA stage and incremental linking we use a hash-map
5867 to store the decl <-> label + offset map. */
5868 if (!external_die_map)
5869 return false;
5870 sym_off_pair *desc = external_die_map->get (decl);
5871 if (!desc)
5872 return false;
5873 *sym = desc->sym;
5874 *off = desc->off;
5875 return true;
5876 }
5877
5878 if (TREE_CODE (decl) == BLOCK)
5879 die = lookup_block_die (decl);
5880 else
5881 die = lookup_decl_die (decl);
5882 if (!die)
5883 return false;
5884
5885 /* Similar to get_ref_die_offset_label, but using the "correct"
5886 label. */
5887 *off = die->die_offset;
5888 while (die->die_parent)
5889 die = die->die_parent;
5890 /* For the containing CU DIE we compute a die_symbol in
5891 compute_comp_unit_symbol. */
5892 gcc_assert (die->die_tag == DW_TAG_compile_unit
5893 && die->die_id.die_symbol != NULL);
5894 *sym = die->die_id.die_symbol;
5895 return true;
5896 }
5897
5898 /* Add to DIE a reference of kind ATTR_KIND to the DIE at SYMBOL + OFFSET. */
5899
5900 static void
5901 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5902 const char *symbol, HOST_WIDE_INT offset)
5903 {
5904 /* Create a fake DIE that contains the reference. Don't use
5905 new_die because we don't want to end up in the limbo list. */
5906 /* ??? We probably want to share these, thus put a ref to the DIE
5907 we create here to the external_die_map entry. */
5908 dw_die_ref ref = new_die_raw (die->die_tag);
5909 ref->die_id.die_symbol = symbol;
5910 ref->die_offset = offset;
5911 ref->with_offset = 1;
5912 add_AT_die_ref (die, attr_kind, ref);
5913 }
5914
5915 /* Record for DECL the symbol SYM + offset OFF at which a DIE containing
5916 its attributes dumped early can be found; a DIE referencing it is created later if required. */
5917
5918 static void
5919 dwarf2out_register_external_die (tree decl, const char *sym,
5920 unsigned HOST_WIDE_INT off)
5921 {
5922 if (debug_info_level == DINFO_LEVEL_NONE)
5923 return;
5924
5925 if (!external_die_map)
5926 external_die_map = hash_map<tree, sym_off_pair>::create_ggc (1000);
5927 gcc_checking_assert (!external_die_map->get (decl));
5928 sym_off_pair p = { IDENTIFIER_POINTER (get_identifier (sym)), off };
5929 external_die_map->put (decl, p);
5930 }
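
/* Illustrative sketch (not part of the original source): the LTO round
   trip the two routines above implement.  At early-debug time the symbol
   and offset of a decl's DIE are streamed out; at LTRANS time they are
   registered again, so that a later lookup_decl_die can synthesize a
   concrete-instance stub whose DW_AT_abstract_origin points back at
   SYM + OFF (see maybe_create_die_with_external_ref below).  */
#if 0
static void
example_stream_in_early_debug_ref (tree decl, const char *sym,
                                   unsigned HOST_WIDE_INT off)
{
  /* Record where DECL's early debug DIE lives ...  */
  dwarf2out_register_external_die (decl, sym, off);
  /* ... so that a later lookup_decl_die (decl) yields a stub DIE that
     references it instead of duplicating the early attributes.  */
}
#endif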
5931
5932 /* If we have a registered external DIE for DECL return a new DIE for
5933 the concrete instance with an appropriate abstract origin. */
5934
5935 static dw_die_ref
5936 maybe_create_die_with_external_ref (tree decl)
5937 {
5938 if (!external_die_map)
5939 return NULL;
5940 sym_off_pair *desc = external_die_map->get (decl);
5941 if (!desc)
5942 return NULL;
5943
5944 const char *sym = desc->sym;
5945 unsigned HOST_WIDE_INT off = desc->off;
5946
5947 in_lto_p = false;
5948 dw_die_ref die = (TREE_CODE (decl) == BLOCK
5949 ? lookup_block_die (decl) : lookup_decl_die (decl));
5950 gcc_assert (!die);
5951 in_lto_p = true;
5952
5953 tree ctx;
5954 dw_die_ref parent = NULL;
5955 /* Need to look up a DIE for the decl's context - the containing
5956 function or translation unit. */
5957 if (TREE_CODE (decl) == BLOCK)
5958 {
5959 ctx = BLOCK_SUPERCONTEXT (decl);
5960 /* ??? We do not output DIEs for all scopes thus skip as
5961 many DIEs as needed. */
5962 while (TREE_CODE (ctx) == BLOCK
5963 && !lookup_block_die (ctx))
5964 ctx = BLOCK_SUPERCONTEXT (ctx);
5965 }
5966 else
5967 ctx = DECL_CONTEXT (decl);
5968 /* Peel types in the context stack. */
5969 while (ctx && TYPE_P (ctx))
5970 ctx = TYPE_CONTEXT (ctx);
5971 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5972 if (debug_info_level <= DINFO_LEVEL_TERSE)
5973 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5974 ctx = DECL_CONTEXT (ctx);
5975 if (ctx)
5976 {
5977 if (TREE_CODE (ctx) == BLOCK)
5978 parent = lookup_block_die (ctx);
5979 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5980 /* Keep the 1:1 association during WPA. */
5981 && !flag_wpa
5982 && flag_incremental_link != INCREMENTAL_LINK_LTO)
5983 /* Otherwise all late annotations go to the main CU which
5984 imports the original CUs. */
5985 parent = comp_unit_die ();
5986 else if (TREE_CODE (ctx) == FUNCTION_DECL
5987 && TREE_CODE (decl) != FUNCTION_DECL
5988 && TREE_CODE (decl) != PARM_DECL
5989 && TREE_CODE (decl) != RESULT_DECL
5990 && TREE_CODE (decl) != BLOCK)
5991 /* Leave function local entities parent determination to when
5992 we process scope vars. */
5993 ;
5994 else
5995 parent = lookup_decl_die (ctx);
5996 }
5997 else
5998 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5999 Handle this case gracefully by attaching the DIE to the compilation unit. */
6000 parent = comp_unit_die ();
6001 /* Create a DIE "stub". */
6002 switch (TREE_CODE (decl))
6003 {
6004 case TRANSLATION_UNIT_DECL:
6005 {
6006 die = comp_unit_die ();
6007 /* We re-target all CU decls to the LTRANS CU DIE, so no need
6008 to create a DIE for the original CUs. */
6009 return die;
6010 }
6011 case NAMESPACE_DECL:
6012 if (is_fortran (decl))
6013 die = new_die (DW_TAG_module, parent, decl);
6014 else
6015 die = new_die (DW_TAG_namespace, parent, decl);
6016 break;
6017 case FUNCTION_DECL:
6018 die = new_die (DW_TAG_subprogram, parent, decl);
6019 break;
6020 case VAR_DECL:
6021 die = new_die (DW_TAG_variable, parent, decl);
6022 break;
6023 case RESULT_DECL:
6024 die = new_die (DW_TAG_variable, parent, decl);
6025 break;
6026 case PARM_DECL:
6027 die = new_die (DW_TAG_formal_parameter, parent, decl);
6028 break;
6029 case CONST_DECL:
6030 die = new_die (DW_TAG_constant, parent, decl);
6031 break;
6032 case LABEL_DECL:
6033 die = new_die (DW_TAG_label, parent, decl);
6034 break;
6035 case BLOCK:
6036 die = new_die (DW_TAG_lexical_block, parent, decl);
6037 break;
6038 default:
6039 gcc_unreachable ();
6040 }
6041 if (TREE_CODE (decl) == BLOCK)
6042 equate_block_to_die (decl, die);
6043 else
6044 equate_decl_number_to_die (decl, die);
6045
6046 add_desc_attribute (die, decl);
6047
6048 /* Add a reference to the DIE providing early debug at SYM + OFF. */
6049 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
6050
6051 return die;
6052 }
6053
6054 /* Returns a hash value for X (which really is a var_loc_list). */
6055
6056 inline hashval_t
6057 decl_loc_hasher::hash (var_loc_list *x)
6058 {
6059 return (hashval_t) x->decl_id;
6060 }
6061
6062 /* Return nonzero if decl_id of var_loc_list X is the same as
6063 UID of decl *Y. */
6064
6065 inline bool
6066 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6067 {
6068 return (x->decl_id == DECL_UID (y));
6069 }
6070
6071 /* Return the var_loc list associated with a given declaration. */
6072
6073 static inline var_loc_list *
6074 lookup_decl_loc (const_tree decl)
6075 {
6076 if (!decl_loc_table)
6077 return NULL;
6078 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6079 }
6080
6081 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
6082
6083 inline hashval_t
6084 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6085 {
6086 return (hashval_t) x->decl_id;
6087 }
6088
6089 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6090 UID of decl *Y. */
6091
6092 inline bool
6093 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6094 {
6095 return (x->decl_id == DECL_UID (y));
6096 }
6097
6098 /* Equate a DIE to a particular declaration. */
6099
6100 static void
6101 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6102 {
6103 unsigned int decl_id = DECL_UID (decl);
6104
6105 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6106 decl_die->decl_id = decl_id;
6107 }
6108
6109 /* Return how many bits the PIECE EXPR_LIST covers. */
6110
6111 static HOST_WIDE_INT
6112 decl_piece_bitsize (rtx piece)
6113 {
6114 int ret = (int) GET_MODE (piece);
6115 if (ret)
6116 return ret;
6117 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6118 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6119 return INTVAL (XEXP (XEXP (piece, 0), 0));
6120 }
6121
6122 /* Return a pointer to the location note stored in the PIECE EXPR_LIST. */
6123
6124 static rtx *
6125 decl_piece_varloc_ptr (rtx piece)
6126 {
6127 if ((int) GET_MODE (piece))
6128 return &XEXP (piece, 0);
6129 else
6130 return &XEXP (XEXP (piece, 0), 1);
6131 }
6132
6133 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6134 NEXT is the chain of following piece nodes. */
6135
6136 static rtx_expr_list *
6137 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6138 {
6139 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6140 return alloc_EXPR_LIST (bitsize, loc_note, next);
6141 else
6142 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6143 GEN_INT (bitsize),
6144 loc_note), next);
6145 }
6146
6147 /* Return the rtx that should be stored in the loc field for
6148 LOC_NOTE at BITPOS covering BITSIZE bits. */
6149
6150 static rtx
6151 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6152 HOST_WIDE_INT bitsize)
6153 {
6154 if (bitsize != -1)
6155 {
6156 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6157 if (bitpos != 0)
6158 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6159 }
6160 return loc_note;
6161 }
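
/* Illustrative sketch (not part of the original source): the EXPR_LIST
   encoding used by the piece helpers above.  For a variable whose value
   lives in LOC_NOTE for the 32 bits starting at bit 16, construct_piece_list
   builds an outer 16-bit padding node (NULL location) chained to an inner
   32-bit node holding LOC_NOTE.  Small bitsizes are stashed in the node's
   machine-mode slot; larger ones go through the CONCAT form, and
   decl_piece_bitsize and decl_piece_varloc_ptr decode either shape.  */
#if 0
static rtx
example_build_piece_list (rtx loc_note)
{
  /* Equivalent to construct_piece_list (loc_note, 16, 32).  */
  return decl_piece_node (NULL_RTX, 16,
                          decl_piece_node (loc_note, 32, NULL_RTX));
}
#endif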
6162
6163 /* This function either modifies location piece list *DEST in
6164 place (if SRC and INNER are NULL), or copies location piece list
6165 *SRC to *DEST while modifying it. The piece at BITPOS is changed to
6166 contain LOC_NOTE; any pieces overlapping it are removed (or, when
6167 copying, simply not copied), and padding is added around it if needed.
6168 When modifying in place, DEST should point to EXPR_LIST where
6169 earlier pieces cover PIECE_BITPOS bits, when copying SRC points
6170 to the start of the whole list and INNER points to the EXPR_LIST
6171 where earlier pieces cover PIECE_BITPOS bits. */
6172
6173 static void
6174 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6175 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6176 HOST_WIDE_INT bitsize, rtx loc_note)
6177 {
6178 HOST_WIDE_INT diff;
6179 bool copy = inner != NULL;
6180
6181 if (copy)
6182 {
6183 /* First copy all nodes preceding the current bitpos. */
6184 while (src != inner)
6185 {
6186 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6187 decl_piece_bitsize (*src), NULL_RTX);
6188 dest = &XEXP (*dest, 1);
6189 src = &XEXP (*src, 1);
6190 }
6191 }
6192 /* Add padding if needed. */
6193 if (bitpos != piece_bitpos)
6194 {
6195 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6196 copy ? NULL_RTX : *dest);
6197 dest = &XEXP (*dest, 1);
6198 }
6199 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6200 {
6201 gcc_assert (!copy);
6202 /* A piece with the correct bitpos and bitsize already exists;
6203 just update the location for it and return. */
6204 *decl_piece_varloc_ptr (*dest) = loc_note;
6205 return;
6206 }
6207 /* Add the piece that changed. */
6208 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6209 dest = &XEXP (*dest, 1);
6210 /* Skip over pieces that overlap it. */
6211 diff = bitpos - piece_bitpos + bitsize;
6212 if (!copy)
6213 src = dest;
6214 while (diff > 0 && *src)
6215 {
6216 rtx piece = *src;
6217 diff -= decl_piece_bitsize (piece);
6218 if (copy)
6219 src = &XEXP (piece, 1);
6220 else
6221 {
6222 *src = XEXP (piece, 1);
6223 free_EXPR_LIST_node (piece);
6224 }
6225 }
6226 /* Add padding if needed. */
6227 if (diff < 0 && *src)
6228 {
6229 if (!copy)
6230 dest = src;
6231 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6232 dest = &XEXP (*dest, 1);
6233 }
6234 if (!copy)
6235 return;
6236 /* Finally copy all nodes following it. */
6237 while (*src)
6238 {
6239 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6240 decl_piece_bitsize (*src), NULL_RTX);
6241 dest = &XEXP (*dest, 1);
6242 src = &XEXP (*src, 1);
6243 }
6244 }
6245
6246 /* Add a variable location node to the linked list for DECL. */
6247
6248 static struct var_loc_node *
6249 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6250 {
6251 unsigned int decl_id;
6252 var_loc_list *temp;
6253 struct var_loc_node *loc = NULL;
6254 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6255
6256 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6257 {
6258 tree realdecl = DECL_DEBUG_EXPR (decl);
6259 if (handled_component_p (realdecl)
6260 || (TREE_CODE (realdecl) == MEM_REF
6261 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6262 {
6263 bool reverse;
6264 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6265 &bitsize, &reverse);
6266 if (!innerdecl
6267 || !DECL_P (innerdecl)
6268 || DECL_IGNORED_P (innerdecl)
6269 || TREE_STATIC (innerdecl)
6270 || bitsize == 0
6271 || bitpos + bitsize > 256)
6272 return NULL;
6273 decl = innerdecl;
6274 }
6275 }
6276
6277 decl_id = DECL_UID (decl);
6278 var_loc_list **slot
6279 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6280 if (*slot == NULL)
6281 {
6282 temp = ggc_cleared_alloc<var_loc_list> ();
6283 temp->decl_id = decl_id;
6284 *slot = temp;
6285 }
6286 else
6287 temp = *slot;
6288
6289 /* For PARM_DECLs try to keep around the original incoming value,
6290 even if that means we'll emit a zero-range .debug_loc entry. */
6291 if (temp->last
6292 && temp->first == temp->last
6293 && TREE_CODE (decl) == PARM_DECL
6294 && NOTE_P (temp->first->loc)
6295 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6296 && DECL_INCOMING_RTL (decl)
6297 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6298 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6299 == GET_CODE (DECL_INCOMING_RTL (decl))
6300 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6301 && (bitsize != -1
6302 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6303 NOTE_VAR_LOCATION_LOC (loc_note))
6304 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6305 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6306 {
6307 loc = ggc_cleared_alloc<var_loc_node> ();
6308 temp->first->next = loc;
6309 temp->last = loc;
6310 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6311 }
6312 else if (temp->last)
6313 {
6314 struct var_loc_node *last = temp->last, *unused = NULL;
6315 rtx *piece_loc = NULL, last_loc_note;
6316 HOST_WIDE_INT piece_bitpos = 0;
6317 if (last->next)
6318 {
6319 last = last->next;
6320 gcc_assert (last->next == NULL);
6321 }
6322 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6323 {
6324 piece_loc = &last->loc;
6325 do
6326 {
6327 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6328 if (piece_bitpos + cur_bitsize > bitpos)
6329 break;
6330 piece_bitpos += cur_bitsize;
6331 piece_loc = &XEXP (*piece_loc, 1);
6332 }
6333 while (*piece_loc);
6334 }
6335 /* TEMP->LAST here is a pointer to either the last-but-one or the
6336 last element in the chained list; LAST is a pointer to the
6337 last element. */
6338 if (label && strcmp (last->label, label) == 0 && last->view == view)
6339 {
6340 /* For SRA-optimized variables, if there weren't any real
6341 insns since the last note, just modify the last node. */
6342 if (piece_loc != NULL)
6343 {
6344 adjust_piece_list (piece_loc, NULL, NULL,
6345 bitpos, piece_bitpos, bitsize, loc_note);
6346 return NULL;
6347 }
6348 /* If the last note doesn't cover any instructions, remove it. */
6349 if (temp->last != last)
6350 {
6351 temp->last->next = NULL;
6352 unused = last;
6353 last = temp->last;
6354 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6355 }
6356 else
6357 {
6358 gcc_assert (temp->first == temp->last
6359 || (temp->first->next == temp->last
6360 && TREE_CODE (decl) == PARM_DECL));
6361 memset (temp->last, '\0', sizeof (*temp->last));
6362 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6363 return temp->last;
6364 }
6365 }
6366 if (bitsize == -1 && NOTE_P (last->loc))
6367 last_loc_note = last->loc;
6368 else if (piece_loc != NULL
6369 && *piece_loc != NULL_RTX
6370 && piece_bitpos == bitpos
6371 && decl_piece_bitsize (*piece_loc) == bitsize)
6372 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6373 else
6374 last_loc_note = NULL_RTX;
6375 /* If the current location is the same as the end of the list,
6376 and either both or neither of the locations is uninitialized,
6377 we have nothing to do. */
6378 if (last_loc_note == NULL_RTX
6379 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6380 NOTE_VAR_LOCATION_LOC (loc_note)))
6381 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6382 != NOTE_VAR_LOCATION_STATUS (loc_note))
6383 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6384 == VAR_INIT_STATUS_UNINITIALIZED)
6385 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6386 == VAR_INIT_STATUS_UNINITIALIZED))))
6387 {
6388 /* Add LOC to the end of list and update LAST. If the last
6389 element of the list has been removed above, reuse its
6390 memory for the new node, otherwise allocate a new one. */
6391 if (unused)
6392 {
6393 loc = unused;
6394 memset (loc, '\0', sizeof (*loc));
6395 }
6396 else
6397 loc = ggc_cleared_alloc<var_loc_node> ();
6398 if (bitsize == -1 || piece_loc == NULL)
6399 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6400 else
6401 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6402 bitpos, piece_bitpos, bitsize, loc_note);
6403 last->next = loc;
6404 /* Ensure TEMP->LAST will point either to the new last but one
6405 element of the chain, or to the last element in it. */
6406 if (last != temp->last)
6407 temp->last = last;
6408 }
6409 else if (unused)
6410 ggc_free (unused);
6411 }
6412 else
6413 {
6414 loc = ggc_cleared_alloc<var_loc_node> ();
6415 temp->first = loc;
6416 temp->last = loc;
6417 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6418 }
6419 return loc;
6420 }
6421 \f
6422 /* Keep track of the number of spaces used to indent the
6423 output of the debugging routines that print the structure of
6424 the DIE internal representation. */
6425 static int print_indent;
6426
6427 /* Indent the line the number of spaces given by print_indent. */
6428
6429 static inline void
6430 print_spaces (FILE *outfile)
6431 {
6432 fprintf (outfile, "%*s", print_indent, "");
6433 }
6434
6435 /* Print a type signature in hex. */
6436
6437 static inline void
6438 print_signature (FILE *outfile, char *sig)
6439 {
6440 int i;
6441
6442 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6443 fprintf (outfile, "%02x", sig[i] & 0xff);
6444 }
6445
6446 static inline void
6447 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6448 {
6449 if (discr_value->pos)
6450 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6451 else
6452 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6453 }
6454
6455 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6456
6457 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6458 RECURSE, output location descriptor operations. */
6459
6460 static void
6461 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6462 {
6463 switch (val->val_class)
6464 {
6465 case dw_val_class_addr:
6466 fprintf (outfile, "address");
6467 break;
6468 case dw_val_class_offset:
6469 fprintf (outfile, "offset");
6470 break;
6471 case dw_val_class_loc:
6472 fprintf (outfile, "location descriptor");
6473 if (val->v.val_loc == NULL)
6474 fprintf (outfile, " -> <null>\n");
6475 else if (recurse)
6476 {
6477 fprintf (outfile, ":\n");
6478 print_indent += 4;
6479 print_loc_descr (val->v.val_loc, outfile);
6480 print_indent -= 4;
6481 }
6482 else
6483 {
6484 if (flag_dump_noaddr || flag_dump_unnumbered)
6485 fprintf (outfile, " #\n");
6486 else
6487 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6488 }
6489 break;
6490 case dw_val_class_loc_list:
6491 fprintf (outfile, "location list -> label:%s",
6492 val->v.val_loc_list->ll_symbol);
6493 break;
6494 case dw_val_class_view_list:
6495 val = view_list_to_loc_list_val_node (val);
6496 fprintf (outfile, "location list with views -> labels:%s and %s",
6497 val->v.val_loc_list->ll_symbol,
6498 val->v.val_loc_list->vl_symbol);
6499 break;
6500 case dw_val_class_range_list:
6501 fprintf (outfile, "range list");
6502 break;
6503 case dw_val_class_const:
6504 case dw_val_class_const_implicit:
6505 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6506 break;
6507 case dw_val_class_unsigned_const:
6508 case dw_val_class_unsigned_const_implicit:
6509 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6510 break;
6511 case dw_val_class_const_double:
6512 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6513 HOST_WIDE_INT_PRINT_UNSIGNED")",
6514 val->v.val_double.high,
6515 val->v.val_double.low);
6516 break;
6517 case dw_val_class_wide_int:
6518 {
6519 int i = val->v.val_wide->get_len ();
6520 fprintf (outfile, "constant (");
6521 gcc_assert (i > 0);
6522 if (val->v.val_wide->elt (i - 1) == 0)
6523 fprintf (outfile, "0x");
6524 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6525 val->v.val_wide->elt (--i));
6526 while (--i >= 0)
6527 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6528 val->v.val_wide->elt (i));
6529 fprintf (outfile, ")");
6530 break;
6531 }
6532 case dw_val_class_vec:
6533 fprintf (outfile, "floating-point or vector constant");
6534 break;
6535 case dw_val_class_flag:
6536 fprintf (outfile, "%u", val->v.val_flag);
6537 break;
6538 case dw_val_class_die_ref:
6539 if (val->v.val_die_ref.die != NULL)
6540 {
6541 dw_die_ref die = val->v.val_die_ref.die;
6542
6543 if (die->comdat_type_p)
6544 {
6545 fprintf (outfile, "die -> signature: ");
6546 print_signature (outfile,
6547 die->die_id.die_type_node->signature);
6548 }
6549 else if (die->die_id.die_symbol)
6550 {
6551 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6552 if (die->with_offset)
6553 fprintf (outfile, " + %ld", die->die_offset);
6554 }
6555 else
6556 fprintf (outfile, "die -> %ld", die->die_offset);
6557 if (flag_dump_noaddr || flag_dump_unnumbered)
6558 fprintf (outfile, " #");
6559 else
6560 fprintf (outfile, " (%p)", (void *) die);
6561 }
6562 else
6563 fprintf (outfile, "die -> <null>");
6564 break;
6565 case dw_val_class_vms_delta:
6566 fprintf (outfile, "delta: @slotcount(%s-%s)",
6567 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6568 break;
6569 case dw_val_class_symview:
6570 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6571 break;
6572 case dw_val_class_lbl_id:
6573 case dw_val_class_lineptr:
6574 case dw_val_class_macptr:
6575 case dw_val_class_loclistsptr:
6576 case dw_val_class_high_pc:
6577 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6578 break;
6579 case dw_val_class_str:
6580 if (val->v.val_str->str != NULL)
6581 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6582 else
6583 fprintf (outfile, "<null>");
6584 break;
6585 case dw_val_class_file:
6586 case dw_val_class_file_implicit:
6587 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6588 val->v.val_file->emitted_number);
6589 break;
6590 case dw_val_class_data8:
6591 {
6592 int i;
6593
6594 for (i = 0; i < 8; i++)
6595 fprintf (outfile, "%02x", val->v.val_data8[i]);
6596 break;
6597 }
6598 case dw_val_class_discr_value:
6599 print_discr_value (outfile, &val->v.val_discr_value);
6600 break;
6601 case dw_val_class_discr_list:
6602 for (dw_discr_list_ref node = val->v.val_discr_list;
6603 node != NULL;
6604 node = node->dw_discr_next)
6605 {
6606 if (node->dw_discr_range)
6607 {
6608 print_discr_value (outfile, &node->dw_discr_lower_bound);
6609 fprintf (outfile, " .. ");
6610 print_discr_value (outfile, &node->dw_discr_upper_bound);
6611 }
6612 else
6613 print_discr_value (outfile, &node->dw_discr_lower_bound);
6614
6615 if (node->dw_discr_next != NULL)
6616 fprintf (outfile, " | ");
6617 }
6618 default:
6619 break;
6620 }
6621 }
6622
6623 /* Likewise, for a DIE attribute. */
6624
6625 static void
6626 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6627 {
6628 print_dw_val (&a->dw_attr_val, recurse, outfile);
6629 }
6630
6631
6632 /* Print the list of operations in the LOC location description to OUTFILE. This
6633 routine is a debugging aid only. */
6634
6635 static void
6636 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6637 {
6638 dw_loc_descr_ref l = loc;
6639
6640 if (loc == NULL)
6641 {
6642 print_spaces (outfile);
6643 fprintf (outfile, "<null>\n");
6644 return;
6645 }
6646
6647 for (l = loc; l != NULL; l = l->dw_loc_next)
6648 {
6649 print_spaces (outfile);
6650 if (flag_dump_noaddr || flag_dump_unnumbered)
6651 fprintf (outfile, "#");
6652 else
6653 fprintf (outfile, "(%p)", (void *) l);
6654 fprintf (outfile, " %s",
6655 dwarf_stack_op_name (l->dw_loc_opc));
6656 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6657 {
6658 fprintf (outfile, " ");
6659 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6660 }
6661 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6662 {
6663 fprintf (outfile, ", ");
6664 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6665 }
6666 fprintf (outfile, "\n");
6667 }
6668 }
6669
6670 /* Print the information associated with a given DIE, and its children.
6671 This routine is a debugging aid only. */
6672
6673 static void
6674 print_die (dw_die_ref die, FILE *outfile)
6675 {
6676 dw_attr_node *a;
6677 dw_die_ref c;
6678 unsigned ix;
6679
6680 print_spaces (outfile);
6681 fprintf (outfile, "DIE %4ld: %s ",
6682 die->die_offset, dwarf_tag_name (die->die_tag));
6683 if (flag_dump_noaddr || flag_dump_unnumbered)
6684 fprintf (outfile, "#\n");
6685 else
6686 fprintf (outfile, "(%p)\n", (void*) die);
6687 print_spaces (outfile);
6688 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6689 fprintf (outfile, " offset: %ld", die->die_offset);
6690 fprintf (outfile, " mark: %d\n", die->die_mark);
6691
6692 if (die->comdat_type_p)
6693 {
6694 print_spaces (outfile);
6695 fprintf (outfile, " signature: ");
6696 print_signature (outfile, die->die_id.die_type_node->signature);
6697 fprintf (outfile, "\n");
6698 }
6699
6700 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6701 {
6702 print_spaces (outfile);
6703 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6704
6705 print_attribute (a, true, outfile);
6706 fprintf (outfile, "\n");
6707 }
6708
6709 if (die->die_child != NULL)
6710 {
6711 print_indent += 4;
6712 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6713 print_indent -= 4;
6714 }
6715 if (print_indent == 0)
6716 fprintf (outfile, "\n");
6717 }
6718
6719 /* Print the list of operations in the LOC location description. */
6720
6721 DEBUG_FUNCTION void
6722 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6723 {
6724 print_loc_descr (loc, stderr);
6725 }
6726
6727 /* Print the information collected for a given DIE. */
6728
6729 DEBUG_FUNCTION void
6730 debug_dwarf_die (dw_die_ref die)
6731 {
6732 print_die (die, stderr);
6733 }
6734
6735 DEBUG_FUNCTION void
6736 debug (die_struct &ref)
6737 {
6738 print_die (&ref, stderr);
6739 }
6740
6741 DEBUG_FUNCTION void
6742 debug (die_struct *ptr)
6743 {
6744 if (ptr)
6745 debug (*ptr);
6746 else
6747 fprintf (stderr, "<nil>\n");
6748 }
6749
6750
6751 /* Print all DWARF information collected for the compilation unit.
6752 This routine is a debugging aid only. */
6753
6754 DEBUG_FUNCTION void
6755 debug_dwarf (void)
6756 {
6757 print_indent = 0;
6758 print_die (comp_unit_die (), stderr);
6759 }
6760
6761 /* Verify the DIE tree structure. */
6762
6763 DEBUG_FUNCTION void
6764 verify_die (dw_die_ref die)
6765 {
6766 gcc_assert (!die->die_mark);
6767 if (die->die_parent == NULL
6768 && die->die_sib == NULL)
6769 return;
6770 /* Verify the die_sib list is cyclic. */
6771 dw_die_ref x = die;
6772 do
6773 {
6774 x->die_mark = 1;
6775 x = x->die_sib;
6776 }
6777 while (x && !x->die_mark);
6778 gcc_assert (x == die);
6779 x = die;
6780 do
6781 {
6782 /* Verify all dies have the same parent. */
6783 gcc_assert (x->die_parent == die->die_parent);
6784 if (x->die_child)
6785 {
6786 /* Verify the child has the proper parent and recurse. */
6787 gcc_assert (x->die_child->die_parent == x);
6788 verify_die (x->die_child);
6789 }
6790 x->die_mark = 0;
6791 x = x->die_sib;
6792 }
6793 while (x && x->die_mark);
6794 }
6795
6796 /* Sanity checks on DIEs. */
6797
6798 static void
6799 check_die (dw_die_ref die)
6800 {
6801 unsigned ix;
6802 dw_attr_node *a;
6803 bool inline_found = false;
6804 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6805 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6806 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6807 {
6808 switch (a->dw_attr)
6809 {
6810 case DW_AT_inline:
6811 if (a->dw_attr_val.v.val_unsigned)
6812 inline_found = true;
6813 break;
6814 case DW_AT_location:
6815 ++n_location;
6816 break;
6817 case DW_AT_low_pc:
6818 ++n_low_pc;
6819 break;
6820 case DW_AT_high_pc:
6821 ++n_high_pc;
6822 break;
6823 case DW_AT_artificial:
6824 ++n_artificial;
6825 break;
6826 case DW_AT_decl_column:
6827 ++n_decl_column;
6828 break;
6829 case DW_AT_decl_line:
6830 ++n_decl_line;
6831 break;
6832 case DW_AT_decl_file:
6833 ++n_decl_file;
6834 break;
6835 default:
6836 break;
6837 }
6838 }
6839 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6840 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6841 {
6842 fprintf (stderr, "Duplicate attributes in DIE:\n");
6843 debug_dwarf_die (die);
6844 gcc_unreachable ();
6845 }
6846 if (inline_found)
6847 {
6848 /* A debugging information entry that is a member of an abstract
6849 instance tree [that has DW_AT_inline] should not contain any
6850 attributes which describe aspects of the subroutine which vary
6851 between distinct inlined expansions or distinct out-of-line
6852 expansions. */
6853 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6854 gcc_assert (a->dw_attr != DW_AT_low_pc
6855 && a->dw_attr != DW_AT_high_pc
6856 && a->dw_attr != DW_AT_location
6857 && a->dw_attr != DW_AT_frame_base
6858 && a->dw_attr != DW_AT_call_all_calls
6859 && a->dw_attr != DW_AT_GNU_all_call_sites);
6860 }
6861 }
6862 \f
6863 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6864 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6865 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6866
6867 /* Calculate the checksum of a location expression. */
6868
6869 static inline void
6870 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6871 {
6872 int tem;
6873 inchash::hash hstate;
6874 hashval_t hash;
6875
6876 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6877 CHECKSUM (tem);
6878 hash_loc_operands (loc, hstate);
6879 hash = hstate.end();
6880 CHECKSUM (hash);
6881 }
6882
6883 /* Calculate the checksum of an attribute. */
6884
6885 static void
6886 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6887 {
6888 dw_loc_descr_ref loc;
6889 rtx r;
6890
6891 CHECKSUM (at->dw_attr);
6892
6893 /* We don't care that this was compiled with a different compiler
6894 snapshot; if the output is the same, that's what matters. */
6895 if (at->dw_attr == DW_AT_producer)
6896 return;
6897
6898 switch (AT_class (at))
6899 {
6900 case dw_val_class_const:
6901 case dw_val_class_const_implicit:
6902 CHECKSUM (at->dw_attr_val.v.val_int);
6903 break;
6904 case dw_val_class_unsigned_const:
6905 case dw_val_class_unsigned_const_implicit:
6906 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6907 break;
6908 case dw_val_class_const_double:
6909 CHECKSUM (at->dw_attr_val.v.val_double);
6910 break;
6911 case dw_val_class_wide_int:
6912 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6913 get_full_len (*at->dw_attr_val.v.val_wide)
6914 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6915 break;
6916 case dw_val_class_vec:
6917 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6918 (at->dw_attr_val.v.val_vec.length
6919 * at->dw_attr_val.v.val_vec.elt_size));
6920 break;
6921 case dw_val_class_flag:
6922 CHECKSUM (at->dw_attr_val.v.val_flag);
6923 break;
6924 case dw_val_class_str:
6925 CHECKSUM_STRING (AT_string (at));
6926 break;
6927
6928 case dw_val_class_addr:
6929 r = AT_addr (at);
6930 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6931 CHECKSUM_STRING (XSTR (r, 0));
6932 break;
6933
6934 case dw_val_class_offset:
6935 CHECKSUM (at->dw_attr_val.v.val_offset);
6936 break;
6937
6938 case dw_val_class_loc:
6939 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6940 loc_checksum (loc, ctx);
6941 break;
6942
6943 case dw_val_class_die_ref:
6944 die_checksum (AT_ref (at), ctx, mark);
6945 break;
6946
6947 case dw_val_class_fde_ref:
6948 case dw_val_class_vms_delta:
6949 case dw_val_class_symview:
6950 case dw_val_class_lbl_id:
6951 case dw_val_class_lineptr:
6952 case dw_val_class_macptr:
6953 case dw_val_class_loclistsptr:
6954 case dw_val_class_high_pc:
6955 break;
6956
6957 case dw_val_class_file:
6958 case dw_val_class_file_implicit:
6959 CHECKSUM_STRING (AT_file (at)->filename);
6960 break;
6961
6962 case dw_val_class_data8:
6963 CHECKSUM (at->dw_attr_val.v.val_data8);
6964 break;
6965
6966 default:
6967 break;
6968 }
6969 }
6970
6971 /* Calculate the checksum of a DIE. */
6972
6973 static void
6974 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6975 {
6976 dw_die_ref c;
6977 dw_attr_node *a;
6978 unsigned ix;
6979
6980 /* To avoid infinite recursion. */
6981 if (die->die_mark)
6982 {
6983 CHECKSUM (die->die_mark);
6984 return;
6985 }
6986 die->die_mark = ++(*mark);
6987
6988 CHECKSUM (die->die_tag);
6989
6990 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6991 attr_checksum (a, ctx, mark);
6992
6993 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6994 }
6995
6996 #undef CHECKSUM
6997 #undef CHECKSUM_BLOCK
6998 #undef CHECKSUM_STRING
6999
7000 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
7001 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
7002 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
7003 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
7004 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
7005 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
7006 #define CHECKSUM_ATTR(FOO) \
7007 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
7008
7009 /* Calculate the checksum of a number in signed LEB128 format. */
7010
7011 static void
7012 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
7013 {
7014 unsigned char byte;
7015 bool more;
7016
7017 while (1)
7018 {
7019 byte = (value & 0x7f);
7020 value >>= 7;
7021 more = !((value == 0 && (byte & 0x40) == 0)
7022 || (value == -1 && (byte & 0x40) != 0));
7023 if (more)
7024 byte |= 0x80;
7025 CHECKSUM (byte);
7026 if (!more)
7027 break;
7028 }
7029 }
7030
7031 /* Calculate the checksum of a number in unsigned LEB128 format. */
7032
7033 static void
7034 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
7035 {
7036 while (1)
7037 {
7038 unsigned char byte = (value & 0x7f);
7039 value >>= 7;
7040 if (value != 0)
7041 /* More bytes to follow. */
7042 byte |= 0x80;
7043 CHECKSUM (byte);
7044 if (value == 0)
7045 break;
7046 }
7047 }
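
/* As an illustration of the two helpers above: the unsigned value 624485
   splits into the 7-bit groups 0x65, 0x0e and 0x26 from least to most
   significant, so checksum_uleb128 feeds the bytes 0xe5 0x8e 0x26 into CTX;
   the signed value -2 fits in a single group whose sign bit (0x40) is set,
   so checksum_sleb128 feeds just the byte 0x7e.  */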
7048
7049 /* Checksum the context of the DIE. This adds the names of any
7050 surrounding namespaces or structures to the checksum. */
7051
7052 static void
7053 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
7054 {
7055 const char *name;
7056 dw_die_ref spec;
7057 int tag = die->die_tag;
7058
7059 if (tag != DW_TAG_namespace
7060 && tag != DW_TAG_structure_type
7061 && tag != DW_TAG_class_type)
7062 return;
7063
7064 name = get_AT_string (die, DW_AT_name);
7065
7066 spec = get_AT_ref (die, DW_AT_specification);
7067 if (spec != NULL)
7068 die = spec;
7069
7070 if (die->die_parent != NULL)
7071 checksum_die_context (die->die_parent, ctx);
7072
7073 CHECKSUM_ULEB128 ('C');
7074 CHECKSUM_ULEB128 (tag);
7075 if (name != NULL)
7076 CHECKSUM_STRING (name);
7077 }
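
/* As a concrete illustration: for a C++ type nested as
   namespace N { struct S { struct T { ... }; }; }, checksumming the context
   of T's DIE contributes 'C', DW_TAG_namespace, "N" and then 'C',
   DW_TAG_structure_type, "S" (outermost scope first, because the recursion
   on die_parent happens before the CHECKSUM_* calls).  */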
7078
7079 /* Calculate the checksum of a location expression. */
7080
7081 static inline void
7082 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7083 {
7084 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7085 were emitted as a DW_FORM_sdata instead of a location expression. */
7086 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7087 {
7088 CHECKSUM_ULEB128 (DW_FORM_sdata);
7089 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7090 return;
7091 }
7092
7093 /* Otherwise, just checksum the raw location expression. */
7094 while (loc != NULL)
7095 {
7096 inchash::hash hstate;
7097 hashval_t hash;
7098
7099 CHECKSUM_ULEB128 (loc->dtprel);
7100 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7101 hash_loc_operands (loc, hstate);
7102 hash = hstate.end ();
7103 CHECKSUM (hash);
7104 loc = loc->dw_loc_next;
7105 }
7106 }
7107
7108 /* Calculate the checksum of an attribute. */
7109
7110 static void
7111 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7112 struct md5_ctx *ctx, int *mark)
7113 {
7114 dw_loc_descr_ref loc;
7115 rtx r;
7116
7117 if (AT_class (at) == dw_val_class_die_ref)
7118 {
7119 dw_die_ref target_die = AT_ref (at);
7120
7121 /* For pointer and reference types, we checksum only the (qualified)
7122 name of the target type (if there is a name). For friend entries,
7123 we checksum only the (qualified) name of the target type or function.
7124 This allows the checksum to remain the same whether the target type
7125 is complete or not. */
7126 if ((at->dw_attr == DW_AT_type
7127 && (tag == DW_TAG_pointer_type
7128 || tag == DW_TAG_reference_type
7129 || tag == DW_TAG_rvalue_reference_type
7130 || tag == DW_TAG_ptr_to_member_type))
7131 || (at->dw_attr == DW_AT_friend
7132 && tag == DW_TAG_friend))
7133 {
7134 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7135
7136 if (name_attr != NULL)
7137 {
7138 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7139
7140 if (decl == NULL)
7141 decl = target_die;
7142 CHECKSUM_ULEB128 ('N');
7143 CHECKSUM_ULEB128 (at->dw_attr);
7144 if (decl->die_parent != NULL)
7145 checksum_die_context (decl->die_parent, ctx);
7146 CHECKSUM_ULEB128 ('E');
7147 CHECKSUM_STRING (AT_string (name_attr));
7148 return;
7149 }
7150 }
7151
7152 /* For all other references to another DIE, we check to see if the
7153 target DIE has already been visited. If it has, we emit a
7154 backward reference; if not, we descend recursively. */
7155 if (target_die->die_mark > 0)
7156 {
7157 CHECKSUM_ULEB128 ('R');
7158 CHECKSUM_ULEB128 (at->dw_attr);
7159 CHECKSUM_ULEB128 (target_die->die_mark);
7160 }
7161 else
7162 {
7163 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7164
7165 if (decl == NULL)
7166 decl = target_die;
7167 target_die->die_mark = ++(*mark);
7168 CHECKSUM_ULEB128 ('T');
7169 CHECKSUM_ULEB128 (at->dw_attr);
7170 if (decl->die_parent != NULL)
7171 checksum_die_context (decl->die_parent, ctx);
7172 die_checksum_ordered (target_die, ctx, mark);
7173 }
7174 return;
7175 }
7176
7177 CHECKSUM_ULEB128 ('A');
7178 CHECKSUM_ULEB128 (at->dw_attr);
7179
7180 switch (AT_class (at))
7181 {
7182 case dw_val_class_const:
7183 case dw_val_class_const_implicit:
7184 CHECKSUM_ULEB128 (DW_FORM_sdata);
7185 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7186 break;
7187
7188 case dw_val_class_unsigned_const:
7189 case dw_val_class_unsigned_const_implicit:
7190 CHECKSUM_ULEB128 (DW_FORM_sdata);
7191 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7192 break;
7193
7194 case dw_val_class_const_double:
7195 CHECKSUM_ULEB128 (DW_FORM_block);
7196 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7197 CHECKSUM (at->dw_attr_val.v.val_double);
7198 break;
7199
7200 case dw_val_class_wide_int:
7201 CHECKSUM_ULEB128 (DW_FORM_block);
7202 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7203 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
7204 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7205 get_full_len (*at->dw_attr_val.v.val_wide)
7206 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7207 break;
7208
7209 case dw_val_class_vec:
7210 CHECKSUM_ULEB128 (DW_FORM_block);
7211 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7212 * at->dw_attr_val.v.val_vec.elt_size);
7213 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7214 (at->dw_attr_val.v.val_vec.length
7215 * at->dw_attr_val.v.val_vec.elt_size));
7216 break;
7217
7218 case dw_val_class_flag:
7219 CHECKSUM_ULEB128 (DW_FORM_flag);
7220 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7221 break;
7222
7223 case dw_val_class_str:
7224 CHECKSUM_ULEB128 (DW_FORM_string);
7225 CHECKSUM_STRING (AT_string (at));
7226 break;
7227
7228 case dw_val_class_addr:
7229 r = AT_addr (at);
7230 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7231 CHECKSUM_ULEB128 (DW_FORM_string);
7232 CHECKSUM_STRING (XSTR (r, 0));
7233 break;
7234
7235 case dw_val_class_offset:
7236 CHECKSUM_ULEB128 (DW_FORM_sdata);
7237 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7238 break;
7239
7240 case dw_val_class_loc:
7241 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7242 loc_checksum_ordered (loc, ctx);
7243 break;
7244
7245 case dw_val_class_fde_ref:
7246 case dw_val_class_symview:
7247 case dw_val_class_lbl_id:
7248 case dw_val_class_lineptr:
7249 case dw_val_class_macptr:
7250 case dw_val_class_loclistsptr:
7251 case dw_val_class_high_pc:
7252 break;
7253
7254 case dw_val_class_file:
7255 case dw_val_class_file_implicit:
7256 CHECKSUM_ULEB128 (DW_FORM_string);
7257 CHECKSUM_STRING (AT_file (at)->filename);
7258 break;
7259
7260 case dw_val_class_data8:
7261 CHECKSUM (at->dw_attr_val.v.val_data8);
7262 break;
7263
7264 default:
7265 break;
7266 }
7267 }
7268
7269 struct checksum_attributes
7270 {
7271 dw_attr_node *at_name;
7272 dw_attr_node *at_type;
7273 dw_attr_node *at_friend;
7274 dw_attr_node *at_accessibility;
7275 dw_attr_node *at_address_class;
7276 dw_attr_node *at_alignment;
7277 dw_attr_node *at_allocated;
7278 dw_attr_node *at_artificial;
7279 dw_attr_node *at_associated;
7280 dw_attr_node *at_binary_scale;
7281 dw_attr_node *at_bit_offset;
7282 dw_attr_node *at_bit_size;
7283 dw_attr_node *at_bit_stride;
7284 dw_attr_node *at_byte_size;
7285 dw_attr_node *at_byte_stride;
7286 dw_attr_node *at_const_value;
7287 dw_attr_node *at_containing_type;
7288 dw_attr_node *at_count;
7289 dw_attr_node *at_data_location;
7290 dw_attr_node *at_data_member_location;
7291 dw_attr_node *at_decimal_scale;
7292 dw_attr_node *at_decimal_sign;
7293 dw_attr_node *at_default_value;
7294 dw_attr_node *at_digit_count;
7295 dw_attr_node *at_discr;
7296 dw_attr_node *at_discr_list;
7297 dw_attr_node *at_discr_value;
7298 dw_attr_node *at_encoding;
7299 dw_attr_node *at_endianity;
7300 dw_attr_node *at_explicit;
7301 dw_attr_node *at_is_optional;
7302 dw_attr_node *at_location;
7303 dw_attr_node *at_lower_bound;
7304 dw_attr_node *at_mutable;
7305 dw_attr_node *at_ordering;
7306 dw_attr_node *at_picture_string;
7307 dw_attr_node *at_prototyped;
7308 dw_attr_node *at_small;
7309 dw_attr_node *at_segment;
7310 dw_attr_node *at_string_length;
7311 dw_attr_node *at_string_length_bit_size;
7312 dw_attr_node *at_string_length_byte_size;
7313 dw_attr_node *at_threads_scaled;
7314 dw_attr_node *at_upper_bound;
7315 dw_attr_node *at_use_location;
7316 dw_attr_node *at_use_UTF8;
7317 dw_attr_node *at_variable_parameter;
7318 dw_attr_node *at_virtuality;
7319 dw_attr_node *at_visibility;
7320 dw_attr_node *at_vtable_elem_location;
7321 };
7322
7323 /* Collect the attributes that we will want to use for the checksum. */
7324
7325 static void
7326 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7327 {
7328 dw_attr_node *a;
7329 unsigned ix;
7330
7331 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7332 {
7333 switch (a->dw_attr)
7334 {
7335 case DW_AT_name:
7336 attrs->at_name = a;
7337 break;
7338 case DW_AT_type:
7339 attrs->at_type = a;
7340 break;
7341 case DW_AT_friend:
7342 attrs->at_friend = a;
7343 break;
7344 case DW_AT_accessibility:
7345 attrs->at_accessibility = a;
7346 break;
7347 case DW_AT_address_class:
7348 attrs->at_address_class = a;
7349 break;
7350 case DW_AT_alignment:
7351 attrs->at_alignment = a;
7352 break;
7353 case DW_AT_allocated:
7354 attrs->at_allocated = a;
7355 break;
7356 case DW_AT_artificial:
7357 attrs->at_artificial = a;
7358 break;
7359 case DW_AT_associated:
7360 attrs->at_associated = a;
7361 break;
7362 case DW_AT_binary_scale:
7363 attrs->at_binary_scale = a;
7364 break;
7365 case DW_AT_bit_offset:
7366 attrs->at_bit_offset = a;
7367 break;
7368 case DW_AT_bit_size:
7369 attrs->at_bit_size = a;
7370 break;
7371 case DW_AT_bit_stride:
7372 attrs->at_bit_stride = a;
7373 break;
7374 case DW_AT_byte_size:
7375 attrs->at_byte_size = a;
7376 break;
7377 case DW_AT_byte_stride:
7378 attrs->at_byte_stride = a;
7379 break;
7380 case DW_AT_const_value:
7381 attrs->at_const_value = a;
7382 break;
7383 case DW_AT_containing_type:
7384 attrs->at_containing_type = a;
7385 break;
7386 case DW_AT_count:
7387 attrs->at_count = a;
7388 break;
7389 case DW_AT_data_location:
7390 attrs->at_data_location = a;
7391 break;
7392 case DW_AT_data_member_location:
7393 attrs->at_data_member_location = a;
7394 break;
7395 case DW_AT_decimal_scale:
7396 attrs->at_decimal_scale = a;
7397 break;
7398 case DW_AT_decimal_sign:
7399 attrs->at_decimal_sign = a;
7400 break;
7401 case DW_AT_default_value:
7402 attrs->at_default_value = a;
7403 break;
7404 case DW_AT_digit_count:
7405 attrs->at_digit_count = a;
7406 break;
7407 case DW_AT_discr:
7408 attrs->at_discr = a;
7409 break;
7410 case DW_AT_discr_list:
7411 attrs->at_discr_list = a;
7412 break;
7413 case DW_AT_discr_value:
7414 attrs->at_discr_value = a;
7415 break;
7416 case DW_AT_encoding:
7417 attrs->at_encoding = a;
7418 break;
7419 case DW_AT_endianity:
7420 attrs->at_endianity = a;
7421 break;
7422 case DW_AT_explicit:
7423 attrs->at_explicit = a;
7424 break;
7425 case DW_AT_is_optional:
7426 attrs->at_is_optional = a;
7427 break;
7428 case DW_AT_location:
7429 attrs->at_location = a;
7430 break;
7431 case DW_AT_lower_bound:
7432 attrs->at_lower_bound = a;
7433 break;
7434 case DW_AT_mutable:
7435 attrs->at_mutable = a;
7436 break;
7437 case DW_AT_ordering:
7438 attrs->at_ordering = a;
7439 break;
7440 case DW_AT_picture_string:
7441 attrs->at_picture_string = a;
7442 break;
7443 case DW_AT_prototyped:
7444 attrs->at_prototyped = a;
7445 break;
7446 case DW_AT_small:
7447 attrs->at_small = a;
7448 break;
7449 case DW_AT_segment:
7450 attrs->at_segment = a;
7451 break;
7452 case DW_AT_string_length:
7453 attrs->at_string_length = a;
7454 break;
7455 case DW_AT_string_length_bit_size:
7456 attrs->at_string_length_bit_size = a;
7457 break;
7458 case DW_AT_string_length_byte_size:
7459 attrs->at_string_length_byte_size = a;
7460 break;
7461 case DW_AT_threads_scaled:
7462 attrs->at_threads_scaled = a;
7463 break;
7464 case DW_AT_upper_bound:
7465 attrs->at_upper_bound = a;
7466 break;
7467 case DW_AT_use_location:
7468 attrs->at_use_location = a;
7469 break;
7470 case DW_AT_use_UTF8:
7471 attrs->at_use_UTF8 = a;
7472 break;
7473 case DW_AT_variable_parameter:
7474 attrs->at_variable_parameter = a;
7475 break;
7476 case DW_AT_virtuality:
7477 attrs->at_virtuality = a;
7478 break;
7479 case DW_AT_visibility:
7480 attrs->at_visibility = a;
7481 break;
7482 case DW_AT_vtable_elem_location:
7483 attrs->at_vtable_elem_location = a;
7484 break;
7485 default:
7486 break;
7487 }
7488 }
7489 }
7490
7491 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7492
7493 static void
7494 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7495 {
7496 dw_die_ref c;
7497 dw_die_ref decl;
7498 struct checksum_attributes attrs;
7499
7500 CHECKSUM_ULEB128 ('D');
7501 CHECKSUM_ULEB128 (die->die_tag);
7502
7503 memset (&attrs, 0, sizeof (attrs));
7504
7505 decl = get_AT_ref (die, DW_AT_specification);
7506 if (decl != NULL)
7507 collect_checksum_attributes (&attrs, decl);
7508 collect_checksum_attributes (&attrs, die);
7509
7510 CHECKSUM_ATTR (attrs.at_name);
7511 CHECKSUM_ATTR (attrs.at_accessibility);
7512 CHECKSUM_ATTR (attrs.at_address_class);
7513 CHECKSUM_ATTR (attrs.at_allocated);
7514 CHECKSUM_ATTR (attrs.at_artificial);
7515 CHECKSUM_ATTR (attrs.at_associated);
7516 CHECKSUM_ATTR (attrs.at_binary_scale);
7517 CHECKSUM_ATTR (attrs.at_bit_offset);
7518 CHECKSUM_ATTR (attrs.at_bit_size);
7519 CHECKSUM_ATTR (attrs.at_bit_stride);
7520 CHECKSUM_ATTR (attrs.at_byte_size);
7521 CHECKSUM_ATTR (attrs.at_byte_stride);
7522 CHECKSUM_ATTR (attrs.at_const_value);
7523 CHECKSUM_ATTR (attrs.at_containing_type);
7524 CHECKSUM_ATTR (attrs.at_count);
7525 CHECKSUM_ATTR (attrs.at_data_location);
7526 CHECKSUM_ATTR (attrs.at_data_member_location);
7527 CHECKSUM_ATTR (attrs.at_decimal_scale);
7528 CHECKSUM_ATTR (attrs.at_decimal_sign);
7529 CHECKSUM_ATTR (attrs.at_default_value);
7530 CHECKSUM_ATTR (attrs.at_digit_count);
7531 CHECKSUM_ATTR (attrs.at_discr);
7532 CHECKSUM_ATTR (attrs.at_discr_list);
7533 CHECKSUM_ATTR (attrs.at_discr_value);
7534 CHECKSUM_ATTR (attrs.at_encoding);
7535 CHECKSUM_ATTR (attrs.at_endianity);
7536 CHECKSUM_ATTR (attrs.at_explicit);
7537 CHECKSUM_ATTR (attrs.at_is_optional);
7538 CHECKSUM_ATTR (attrs.at_location);
7539 CHECKSUM_ATTR (attrs.at_lower_bound);
7540 CHECKSUM_ATTR (attrs.at_mutable);
7541 CHECKSUM_ATTR (attrs.at_ordering);
7542 CHECKSUM_ATTR (attrs.at_picture_string);
7543 CHECKSUM_ATTR (attrs.at_prototyped);
7544 CHECKSUM_ATTR (attrs.at_small);
7545 CHECKSUM_ATTR (attrs.at_segment);
7546 CHECKSUM_ATTR (attrs.at_string_length);
7547 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7548 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7549 CHECKSUM_ATTR (attrs.at_threads_scaled);
7550 CHECKSUM_ATTR (attrs.at_upper_bound);
7551 CHECKSUM_ATTR (attrs.at_use_location);
7552 CHECKSUM_ATTR (attrs.at_use_UTF8);
7553 CHECKSUM_ATTR (attrs.at_variable_parameter);
7554 CHECKSUM_ATTR (attrs.at_virtuality);
7555 CHECKSUM_ATTR (attrs.at_visibility);
7556 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7557 CHECKSUM_ATTR (attrs.at_type);
7558 CHECKSUM_ATTR (attrs.at_friend);
7559 CHECKSUM_ATTR (attrs.at_alignment);
7560
7561 /* Checksum the child DIEs. */
7562 c = die->die_child;
7563 if (c) do {
7564 dw_attr_node *name_attr;
7565
7566 c = c->die_sib;
7567 name_attr = get_AT (c, DW_AT_name);
7568 if (is_template_instantiation (c))
7569 {
7570 /* Ignore instantiations of member type and function templates. */
7571 }
7572 else if (name_attr != NULL
7573 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7574 {
7575 /* Use a shallow checksum for named nested types and member
7576 functions. */
7577 CHECKSUM_ULEB128 ('S');
7578 CHECKSUM_ULEB128 (c->die_tag);
7579 CHECKSUM_STRING (AT_string (name_attr));
7580 }
7581 else
7582 {
7583 /* Use a deep checksum for other children. */
7584 /* Mark this DIE so it gets processed when unmarking. */
7585 if (c->die_mark == 0)
7586 c->die_mark = -1;
7587 die_checksum_ordered (c, ctx, mark);
7588 }
7589 } while (c != die->die_child);
7590
7591 CHECKSUM_ULEB128 (0);
7592 }
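
/* Note on the child walk above, with a hypothetical example: for
   struct outer { struct inner { int i; }; void f (); };
   the named nested type 'inner' and the member function 'f' contribute only
   the shallow 'S', tag, name triple to the checksum of 'outer', so edits to
   their bodies do not perturb the signature of 'outer'; unnamed or non-type
   children are checksummed in full via the recursive call.  */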
7593
7594 /* Add a type name and tag to a hash. */
7595 static void
7596 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7597 {
7598 CHECKSUM_ULEB128 (tag);
7599 CHECKSUM_STRING (name);
7600 }
7601
7602 #undef CHECKSUM
7603 #undef CHECKSUM_STRING
7604 #undef CHECKSUM_ATTR
7605 #undef CHECKSUM_SLEB128
7606 #undef CHECKSUM_ULEB128
7607
7608 /* Generate the type signature for DIE. This is computed by generating an
7609 MD5 checksum over the DIE's tag, its relevant attributes, and its
7610 children. Attributes that are references to other DIEs are processed
7611 by recursion, using the MARK field to prevent infinite recursion.
7612 If the DIE is nested inside a namespace or another type, we also
7613 need to include that context in the signature. The lower 64 bits
7614 of the resulting MD5 checksum comprise the signature. */
7615
7616 static void
7617 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7618 {
7619 int mark;
7620 const char *name;
7621 unsigned char checksum[16];
7622 struct md5_ctx ctx;
7623 dw_die_ref decl;
7624 dw_die_ref parent;
7625
7626 name = get_AT_string (die, DW_AT_name);
7627 decl = get_AT_ref (die, DW_AT_specification);
7628 parent = get_die_parent (die);
7629
7630 /* First, compute a signature for just the type name (and its surrounding
7631 context, if any). This is stored in the type unit DIE for link-time
7632 ODR (one-definition rule) checking. */
7633
7634 if (is_cxx () && name != NULL)
7635 {
7636 md5_init_ctx (&ctx);
7637
7638 /* Checksum the names of surrounding namespaces and structures. */
7639 if (parent != NULL)
7640 checksum_die_context (parent, &ctx);
7641
7642 /* Checksum the current DIE. */
7643 die_odr_checksum (die->die_tag, name, &ctx);
7644 md5_finish_ctx (&ctx, checksum);
7645
7646 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7647 }
7648
7649 /* Next, compute the complete type signature. */
7650
7651 md5_init_ctx (&ctx);
7652 mark = 1;
7653 die->die_mark = mark;
7654
7655 /* Checksum the names of surrounding namespaces and structures. */
7656 if (parent != NULL)
7657 checksum_die_context (parent, &ctx);
7658
7659 /* Checksum the DIE and its children. */
7660 die_checksum_ordered (die, &ctx, &mark);
7661 unmark_all_dies (die);
7662 md5_finish_ctx (&ctx, checksum);
7663
7664 /* Store the signature in the type node and link the type DIE and the
7665 type node together. */
7666 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7667 DWARF_TYPE_SIGNATURE_SIZE);
7668 die->comdat_type_p = true;
7669 die->die_id.die_type_node = type_node;
7670 type_node->type_die = die;
7671
7672 /* If the DIE is a specification, link its declaration to the type node
7673 as well. */
7674 if (decl != NULL)
7675 {
7676 decl->comdat_type_p = true;
7677 decl->die_id.die_type_node = type_node;
7678 }
7679 }
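
/* The following is a minimal, self-contained sketch (kept under #if 0 and
   not part of the compiler) of the digest-truncation step performed above:
   an MD5 digest is computed over some serialized bytes and only its last
   eight bytes are kept as the sig8 value.  The buffer passed in is purely
   hypothetical; in the real code the bytes come from die_checksum_ordered
   and checksum_die_context.  */
#if 0
static void
example_sig8_from_bytes (const unsigned char *bytes, size_t len,
			 unsigned char sig8[8])
{
  struct md5_ctx ctx;
  unsigned char digest[16];

  md5_init_ctx (&ctx);
  md5_process_bytes (bytes, len, &ctx);
  md5_finish_ctx (&ctx, digest);

  /* Keep the low-order eight bytes, as generate_type_signature does with
     DWARF_TYPE_SIGNATURE_SIZE.  */
  memcpy (sig8, &digest[16 - 8], 8);
}
#endif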
7680
7681 /* Do the location expressions look the same? */
7682 static inline int
7683 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7684 {
7685 return loc1->dw_loc_opc == loc2->dw_loc_opc
7686 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7687 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7688 }
7689
7690 /* Do the values look the same? */
7691 static int
7692 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7693 {
7694 dw_loc_descr_ref loc1, loc2;
7695 rtx r1, r2;
7696
7697 if (v1->val_class != v2->val_class)
7698 return 0;
7699
7700 switch (v1->val_class)
7701 {
7702 case dw_val_class_const:
7703 case dw_val_class_const_implicit:
7704 return v1->v.val_int == v2->v.val_int;
7705 case dw_val_class_unsigned_const:
7706 case dw_val_class_unsigned_const_implicit:
7707 return v1->v.val_unsigned == v2->v.val_unsigned;
7708 case dw_val_class_const_double:
7709 return v1->v.val_double.high == v2->v.val_double.high
7710 && v1->v.val_double.low == v2->v.val_double.low;
7711 case dw_val_class_wide_int:
7712 return *v1->v.val_wide == *v2->v.val_wide;
7713 case dw_val_class_vec:
7714 if (v1->v.val_vec.length != v2->v.val_vec.length
7715 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7716 return 0;
7717 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7718 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7719 return 0;
7720 return 1;
7721 case dw_val_class_flag:
7722 return v1->v.val_flag == v2->v.val_flag;
7723 case dw_val_class_str:
7724 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7725
7726 case dw_val_class_addr:
7727 r1 = v1->v.val_addr;
7728 r2 = v2->v.val_addr;
7729 if (GET_CODE (r1) != GET_CODE (r2))
7730 return 0;
7731 return rtx_equal_p (r1, r2);
7732
7733 case dw_val_class_offset:
7734 return v1->v.val_offset == v2->v.val_offset;
7735
7736 case dw_val_class_loc:
7737 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7738 loc1 && loc2;
7739 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7740 if (!same_loc_p (loc1, loc2, mark))
7741 return 0;
7742 return !loc1 && !loc2;
7743
7744 case dw_val_class_die_ref:
7745 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7746
7747 case dw_val_class_symview:
7748 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7749
7750 case dw_val_class_fde_ref:
7751 case dw_val_class_vms_delta:
7752 case dw_val_class_lbl_id:
7753 case dw_val_class_lineptr:
7754 case dw_val_class_macptr:
7755 case dw_val_class_loclistsptr:
7756 case dw_val_class_high_pc:
7757 return 1;
7758
7759 case dw_val_class_file:
7760 case dw_val_class_file_implicit:
7761 return v1->v.val_file == v2->v.val_file;
7762
7763 case dw_val_class_data8:
7764 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7765
7766 default:
7767 return 1;
7768 }
7769 }
7770
7771 /* Do the attributes look the same? */
7772
7773 static int
7774 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7775 {
7776 if (at1->dw_attr != at2->dw_attr)
7777 return 0;
7778
7779 /* We don't care that this was compiled with a different compiler
7780 snapshot; if the output is the same, that's what matters. */
7781 if (at1->dw_attr == DW_AT_producer)
7782 return 1;
7783
7784 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7785 }
7786
7787 /* Do the DIEs look the same? */
7788
7789 static int
7790 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7791 {
7792 dw_die_ref c1, c2;
7793 dw_attr_node *a1;
7794 unsigned ix;
7795
7796 /* To avoid infinite recursion. */
7797 if (die1->die_mark)
7798 return die1->die_mark == die2->die_mark;
7799 die1->die_mark = die2->die_mark = ++(*mark);
7800
7801 if (die1->die_tag != die2->die_tag)
7802 return 0;
7803
7804 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7805 return 0;
7806
7807 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7808 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7809 return 0;
7810
7811 c1 = die1->die_child;
7812 c2 = die2->die_child;
7813 if (! c1)
7814 {
7815 if (c2)
7816 return 0;
7817 }
7818 else
7819 for (;;)
7820 {
7821 if (!same_die_p (c1, c2, mark))
7822 return 0;
7823 c1 = c1->die_sib;
7824 c2 = c2->die_sib;
7825 if (c1 == die1->die_child)
7826 {
7827 if (c2 == die2->die_child)
7828 break;
7829 else
7830 return 0;
7831 }
7832 }
7833
7834 return 1;
7835 }
7836
7837 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7838 children, and set die_symbol. */
7839
7840 static void
7841 compute_comp_unit_symbol (dw_die_ref unit_die)
7842 {
7843 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7844 const char *base = die_name ? lbasename (die_name) : "anonymous";
7845 char *name = XALLOCAVEC (char, strlen (base) + 64);
7846 char *p;
7847 int i, mark;
7848 unsigned char checksum[16];
7849 struct md5_ctx ctx;
7850
7851 /* Compute the checksum of the DIE, then append part of it as hex digits to
7852 the base name (filename) of the unit. */
7853
7854 md5_init_ctx (&ctx);
7855 mark = 0;
7856 die_checksum (unit_die, &ctx, &mark);
7857 unmark_all_dies (unit_die);
7858 md5_finish_ctx (&ctx, checksum);
7859
7860 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7861 not start with a letter but with anything valid for filenames and
7862 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7863 character is not a letter. */
7864 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7865 clean_symbol_name (name);
7866
7867 p = name + strlen (name);
7868 for (i = 0; i < 4; i++)
7869 {
7870 sprintf (p, "%.2x", checksum[i]);
7871 p += 2;
7872 }
7873
7874 unit_die->die_id.die_symbol = xstrdup (name);
7875 }
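
/* For example (the name and digest bytes here are made up): if the unit's
   DW_AT_name is "foo.c" and the computed checksum starts with the bytes
   0x12 0x34 0x56 0x78, the symbol is built from "foo.c." passed through
   clean_symbol_name with the hex digits "12345678" appended, giving the
   comp-unit DIE a reproducible, content-derived die_symbol.  */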
7876
7877 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7878
7879 static int
7880 is_type_die (dw_die_ref die)
7881 {
7882 switch (die->die_tag)
7883 {
7884 case DW_TAG_array_type:
7885 case DW_TAG_class_type:
7886 case DW_TAG_interface_type:
7887 case DW_TAG_enumeration_type:
7888 case DW_TAG_pointer_type:
7889 case DW_TAG_reference_type:
7890 case DW_TAG_rvalue_reference_type:
7891 case DW_TAG_string_type:
7892 case DW_TAG_structure_type:
7893 case DW_TAG_subroutine_type:
7894 case DW_TAG_union_type:
7895 case DW_TAG_ptr_to_member_type:
7896 case DW_TAG_set_type:
7897 case DW_TAG_subrange_type:
7898 case DW_TAG_base_type:
7899 case DW_TAG_const_type:
7900 case DW_TAG_file_type:
7901 case DW_TAG_packed_type:
7902 case DW_TAG_volatile_type:
7903 case DW_TAG_typedef:
7904 return 1;
7905 default:
7906 return 0;
7907 }
7908 }
7909
7910 /* Returns true iff C is a compile-unit DIE. */
7911
7912 static inline bool
7913 is_cu_die (dw_die_ref c)
7914 {
7915 return c && (c->die_tag == DW_TAG_compile_unit
7916 || c->die_tag == DW_TAG_skeleton_unit);
7917 }
7918
7919 /* Returns true iff C is a unit DIE of some sort. */
7920
7921 static inline bool
7922 is_unit_die (dw_die_ref c)
7923 {
7924 return c && (c->die_tag == DW_TAG_compile_unit
7925 || c->die_tag == DW_TAG_partial_unit
7926 || c->die_tag == DW_TAG_type_unit
7927 || c->die_tag == DW_TAG_skeleton_unit);
7928 }
7929
7930 /* Returns true iff C is a namespace DIE. */
7931
7932 static inline bool
7933 is_namespace_die (dw_die_ref c)
7934 {
7935 return c && c->die_tag == DW_TAG_namespace;
7936 }
7937
7938 /* Return non-zero if this DIE is a template parameter. */
7939
7940 static inline bool
7941 is_template_parameter (dw_die_ref die)
7942 {
7943 switch (die->die_tag)
7944 {
7945 case DW_TAG_template_type_param:
7946 case DW_TAG_template_value_param:
7947 case DW_TAG_GNU_template_template_param:
7948 case DW_TAG_GNU_template_parameter_pack:
7949 return true;
7950 default:
7951 return false;
7952 }
7953 }
7954
7955 /* Return non-zero if this DIE represents a template instantiation. */
7956
7957 static inline bool
7958 is_template_instantiation (dw_die_ref die)
7959 {
7960 dw_die_ref c;
7961
7962 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7963 return false;
7964 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7965 return false;
7966 }
7967
7968 static char *
7969 gen_internal_sym (const char *prefix)
7970 {
7971 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7972
7973 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7974 return xstrdup (buf);
7975 }
7976
7977 /* Return non-zero if this DIE is a declaration. */
7978
7979 static int
7980 is_declaration_die (dw_die_ref die)
7981 {
7982 dw_attr_node *a;
7983 unsigned ix;
7984
7985 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7986 if (a->dw_attr == DW_AT_declaration)
7987 return 1;
7988
7989 return 0;
7990 }
7991
7992 /* Return non-zero if this DIE is nested inside a subprogram. */
7993
7994 static int
7995 is_nested_in_subprogram (dw_die_ref die)
7996 {
7997 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7998
7999 if (decl == NULL)
8000 decl = die;
8001 return local_scope_p (decl);
8002 }
8003
8004 /* Return non-zero if this DIE contains a defining declaration of a
8005 subprogram. */
8006
8007 static int
8008 contains_subprogram_definition (dw_die_ref die)
8009 {
8010 dw_die_ref c;
8011
8012 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
8013 return 1;
8014 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
8015 return 0;
8016 }
8017
8018 /* Return non-zero if this is a type DIE that should be moved to a
8019 COMDAT .debug_types section or .debug_info section with DW_UT_*type
8020 unit type. */
8021
8022 static int
8023 should_move_die_to_comdat (dw_die_ref die)
8024 {
8025 switch (die->die_tag)
8026 {
8027 case DW_TAG_class_type:
8028 case DW_TAG_structure_type:
8029 case DW_TAG_enumeration_type:
8030 case DW_TAG_union_type:
8031 /* Don't move declarations, inlined instances, types nested in a
8032 subprogram, or types that contain subprogram definitions. */
8033 if (is_declaration_die (die)
8034 || get_AT (die, DW_AT_abstract_origin)
8035 || is_nested_in_subprogram (die)
8036 || contains_subprogram_definition (die))
8037 return 0;
8038 return 1;
8039 case DW_TAG_array_type:
8040 case DW_TAG_interface_type:
8041 case DW_TAG_pointer_type:
8042 case DW_TAG_reference_type:
8043 case DW_TAG_rvalue_reference_type:
8044 case DW_TAG_string_type:
8045 case DW_TAG_subroutine_type:
8046 case DW_TAG_ptr_to_member_type:
8047 case DW_TAG_set_type:
8048 case DW_TAG_subrange_type:
8049 case DW_TAG_base_type:
8050 case DW_TAG_const_type:
8051 case DW_TAG_file_type:
8052 case DW_TAG_packed_type:
8053 case DW_TAG_volatile_type:
8054 case DW_TAG_typedef:
8055 default:
8056 return 0;
8057 }
8058 }
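
/* As a hypothetical source-level illustration: a complete namespace-scope
   definition such as 'struct point { int x, y; };' is a candidate for its
   own COMDAT type unit, whereas a bare declaration 'struct point;', a
   struct defined inside a function body, or a struct whose subtree ends up
   containing a full subprogram definition stays in the main unit.  */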
8059
8060 /* Make a clone of DIE. */
8061
8062 static dw_die_ref
8063 clone_die (dw_die_ref die)
8064 {
8065 dw_die_ref clone = new_die_raw (die->die_tag);
8066 dw_attr_node *a;
8067 unsigned ix;
8068
8069 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8070 add_dwarf_attr (clone, a);
8071
8072 return clone;
8073 }
8074
8075 /* Make a clone of the tree rooted at DIE. */
8076
8077 static dw_die_ref
8078 clone_tree (dw_die_ref die)
8079 {
8080 dw_die_ref c;
8081 dw_die_ref clone = clone_die (die);
8082
8083 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8084
8085 return clone;
8086 }
8087
8088 /* Make a clone of DIE as a declaration. */
8089
8090 static dw_die_ref
8091 clone_as_declaration (dw_die_ref die)
8092 {
8093 dw_die_ref clone;
8094 dw_die_ref decl;
8095 dw_attr_node *a;
8096 unsigned ix;
8097
8098 /* If the DIE is already a declaration, just clone it. */
8099 if (is_declaration_die (die))
8100 return clone_die (die);
8101
8102 /* If the DIE is a specification, just clone its declaration DIE. */
8103 decl = get_AT_ref (die, DW_AT_specification);
8104 if (decl != NULL)
8105 {
8106 clone = clone_die (decl);
8107 if (die->comdat_type_p)
8108 add_AT_die_ref (clone, DW_AT_signature, die);
8109 return clone;
8110 }
8111
8112 clone = new_die_raw (die->die_tag);
8113
8114 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8115 {
8116 /* We don't want to copy over all attributes.
8117 For example we don't want DW_AT_byte_size because otherwise we will no
8118 longer have a declaration and GDB will treat it as a definition. */
8119
8120 switch (a->dw_attr)
8121 {
8122 case DW_AT_abstract_origin:
8123 case DW_AT_artificial:
8124 case DW_AT_containing_type:
8125 case DW_AT_external:
8126 case DW_AT_name:
8127 case DW_AT_type:
8128 case DW_AT_virtuality:
8129 case DW_AT_linkage_name:
8130 case DW_AT_MIPS_linkage_name:
8131 add_dwarf_attr (clone, a);
8132 break;
8133 case DW_AT_byte_size:
8134 case DW_AT_alignment:
8135 default:
8136 break;
8137 }
8138 }
8139
8140 if (die->comdat_type_p)
8141 add_AT_die_ref (clone, DW_AT_signature, die);
8142
8143 add_AT_flag (clone, DW_AT_declaration, 1);
8144 return clone;
8145 }
8146
8147
8148 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8149
8150 struct decl_table_entry
8151 {
8152 dw_die_ref orig;
8153 dw_die_ref copy;
8154 };
8155
8156 /* Helpers to manipulate hash table of copied declarations. */
8157
8158 /* Hashtable helpers. */
8159
8160 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8161 {
8162 typedef die_struct *compare_type;
8163 static inline hashval_t hash (const decl_table_entry *);
8164 static inline bool equal (const decl_table_entry *, const die_struct *);
8165 };
8166
8167 inline hashval_t
8168 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8169 {
8170 return htab_hash_pointer (entry->orig);
8171 }
8172
8173 inline bool
8174 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8175 const die_struct *entry2)
8176 {
8177 return entry1->orig == entry2;
8178 }
8179
8180 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8181
8182 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8183 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8184 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8185 to check if the ancestor has already been copied into UNIT. */
8186
8187 static dw_die_ref
8188 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8189 decl_hash_type *decl_table)
8190 {
8191 dw_die_ref parent = die->die_parent;
8192 dw_die_ref new_parent = unit;
8193 dw_die_ref copy;
8194 decl_table_entry **slot = NULL;
8195 struct decl_table_entry *entry = NULL;
8196
8197 /* If DIE refers to a stub, unfold that so we get the appropriate
8198 DIE registered as orig in decl_table. */
8199 if (dw_die_ref c = get_AT_ref (die, DW_AT_signature))
8200 die = c;
8201
8202 if (decl_table)
8203 {
8204 /* Check if the entry has already been copied to UNIT. */
8205 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8206 INSERT);
8207 if (*slot != HTAB_EMPTY_ENTRY)
8208 {
8209 entry = *slot;
8210 return entry->copy;
8211 }
8212
8213 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8214 entry = XCNEW (struct decl_table_entry);
8215 entry->orig = die;
8216 entry->copy = NULL;
8217 *slot = entry;
8218 }
8219
8220 if (parent != NULL)
8221 {
8222 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8223 if (spec != NULL)
8224 parent = spec;
8225 if (!is_unit_die (parent))
8226 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8227 }
8228
8229 copy = clone_as_declaration (die);
8230 add_child_die (new_parent, copy);
8231
8232 if (decl_table)
8233 {
8234 /* Record the pointer to the copy. */
8235 entry->copy = copy;
8236 }
8237
8238 return copy;
8239 }
8240 /* Copy the declaration context to the new type unit DIE. This includes
8241 any surrounding namespace or type declarations. If the DIE has an
8242 AT_specification attribute, it also includes attributes and children
8243 attached to the specification, and returns a pointer to the original
8244 parent of the declaration DIE. Returns NULL otherwise. */
8245
8246 static dw_die_ref
8247 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8248 {
8249 dw_die_ref decl;
8250 dw_die_ref new_decl;
8251 dw_die_ref orig_parent = NULL;
8252
8253 decl = get_AT_ref (die, DW_AT_specification);
8254 if (decl == NULL)
8255 decl = die;
8256 else
8257 {
8258 unsigned ix;
8259 dw_die_ref c;
8260 dw_attr_node *a;
8261
8262 /* The original DIE will be changed to a declaration, and must
8263 be moved to be a child of the original declaration DIE. */
8264 orig_parent = decl->die_parent;
8265
8266 /* Copy the type node pointer from the new DIE to the original
8267 declaration DIE so we can forward references later. */
8268 decl->comdat_type_p = true;
8269 decl->die_id.die_type_node = die->die_id.die_type_node;
8270
8271 remove_AT (die, DW_AT_specification);
8272
8273 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8274 {
8275 if (a->dw_attr != DW_AT_name
8276 && a->dw_attr != DW_AT_declaration
8277 && a->dw_attr != DW_AT_external)
8278 add_dwarf_attr (die, a);
8279 }
8280
8281 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8282 }
8283
8284 if (decl->die_parent != NULL
8285 && !is_unit_die (decl->die_parent))
8286 {
8287 new_decl = copy_ancestor_tree (unit, decl, NULL);
8288 if (new_decl != NULL)
8289 {
8290 remove_AT (new_decl, DW_AT_signature);
8291 add_AT_specification (die, new_decl);
8292 }
8293 }
8294
8295 return orig_parent;
8296 }
8297
8298 /* Generate the skeleton ancestor tree for the given NODE, then clone
8299 the DIE and add the clone into the tree. */
8300
8301 static void
8302 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8303 {
8304 if (node->new_die != NULL)
8305 return;
8306
8307 node->new_die = clone_as_declaration (node->old_die);
8308
8309 if (node->parent != NULL)
8310 {
8311 generate_skeleton_ancestor_tree (node->parent);
8312 add_child_die (node->parent->new_die, node->new_die);
8313 }
8314 }
8315
8316 /* Generate a skeleton tree of DIEs containing any declarations that are
8317 found in the original tree. We traverse the tree looking for declaration
8318 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8319
8320 static void
8321 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8322 {
8323 skeleton_chain_node node;
8324 dw_die_ref c;
8325 dw_die_ref first;
8326 dw_die_ref prev = NULL;
8327 dw_die_ref next = NULL;
8328
8329 node.parent = parent;
8330
8331 first = c = parent->old_die->die_child;
8332 if (c)
8333 next = c->die_sib;
8334 if (c) do {
8335 if (prev == NULL || prev->die_sib == c)
8336 prev = c;
8337 c = next;
8338 next = (c == first ? NULL : c->die_sib);
8339 node.old_die = c;
8340 node.new_die = NULL;
8341 if (is_declaration_die (c))
8342 {
8343 if (is_template_instantiation (c))
8344 {
8345 /* Instantiated templates do not need to be cloned into the
8346 type unit. Just move the DIE and its children back to
8347 the skeleton tree (in the main CU). */
8348 remove_child_with_prev (c, prev);
8349 add_child_die (parent->new_die, c);
8350 c = prev;
8351 }
8352 else if (c->comdat_type_p)
8353 {
8354 /* This is the skeleton of a type broken out earlier by
8355 break_out_comdat_types. Clone the existing DIE, but keep the children
8356 under the original (which is in the main CU). */
8357 dw_die_ref clone = clone_die (c);
8358
8359 replace_child (c, clone, prev);
8360 generate_skeleton_ancestor_tree (parent);
8361 add_child_die (parent->new_die, c);
8362 c = clone;
8363 continue;
8364 }
8365 else
8366 {
8367 /* Clone the existing DIE, move the original to the skeleton
8368 tree (which is in the main CU), and put the clone, with
8369 all the original's children, where the original came from
8370 (which is about to be moved to the type unit). */
8371 dw_die_ref clone = clone_die (c);
8372 move_all_children (c, clone);
8373
8374 /* If the original has a DW_AT_object_pointer attribute,
8375 it would now point to a child DIE just moved to the
8376 cloned tree, so we need to remove that attribute from
8377 the original. */
8378 remove_AT (c, DW_AT_object_pointer);
8379
8380 replace_child (c, clone, prev);
8381 generate_skeleton_ancestor_tree (parent);
8382 add_child_die (parent->new_die, c);
8383 node.old_die = clone;
8384 node.new_die = c;
8385 c = clone;
8386 }
8387 }
8388 generate_skeleton_bottom_up (&node);
8389 } while (next != NULL);
8390 }
8391
8392 /* Wrapper function for generate_skeleton_bottom_up. */
8393
8394 static dw_die_ref
8395 generate_skeleton (dw_die_ref die)
8396 {
8397 skeleton_chain_node node;
8398
8399 node.old_die = die;
8400 node.new_die = NULL;
8401 node.parent = NULL;
8402
8403 /* If this type definition is nested inside another type,
8404 and is not an instantiation of a template, always leave
8405 at least a declaration in its place. */
8406 if (die->die_parent != NULL
8407 && is_type_die (die->die_parent)
8408 && !is_template_instantiation (die))
8409 node.new_die = clone_as_declaration (die);
8410
8411 generate_skeleton_bottom_up (&node);
8412 return node.new_die;
8413 }
8414
8415 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8416 declaration. The original DIE is moved to a new compile unit so that
8417 existing references to it follow it to the new location. If any of the
8418 original DIE's descendants is a declaration, we need to replace the
8419 original DIE with a skeleton tree and move the declarations back into the
8420 skeleton tree. */
8421
8422 static dw_die_ref
8423 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8424 dw_die_ref prev)
8425 {
8426 dw_die_ref skeleton, orig_parent;
8427
8428 /* Copy the declaration context to the type unit DIE. If the returned
8429 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8430 that DIE. */
8431 orig_parent = copy_declaration_context (unit, child);
8432
8433 skeleton = generate_skeleton (child);
8434 if (skeleton == NULL)
8435 remove_child_with_prev (child, prev);
8436 else
8437 {
8438 skeleton->comdat_type_p = true;
8439 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8440
8441 /* If the original DIE was a specification, we need to put
8442 the skeleton under the parent DIE of the declaration.
8443 This leaves the original declaration in the tree, but
8444 it will be pruned later since there are no longer any
8445 references to it. */
8446 if (orig_parent != NULL)
8447 {
8448 remove_child_with_prev (child, prev);
8449 add_child_die (orig_parent, skeleton);
8450 }
8451 else
8452 replace_child (child, skeleton, prev);
8453 }
8454
8455 return skeleton;
8456 }
8457
8458 static void
8459 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8460 comdat_type_node *type_node,
8461 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8462
8463 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8464 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8465 DWARF procedure references in the DW_AT_location attribute. */
8466
8467 static dw_die_ref
8468 copy_dwarf_procedure (dw_die_ref die,
8469 comdat_type_node *type_node,
8470 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8471 {
8472 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8473
8474 /* DWARF procedures are not supposed to have children... */
8475 gcc_assert (die->die_child == NULL);
8476
8477 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8478 gcc_assert (vec_safe_length (die->die_attr) == 1
8479 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8480
8481 /* Do not copy DWARF procedures more than once. */
8482 bool existed;
8483 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8484 if (existed)
8485 return die_copy;
8486
8487 die_copy = clone_die (die);
8488 add_child_die (type_node->root_die, die_copy);
8489 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8490 return die_copy;
8491 }
8492
8493 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8494 procedures in DIE's attributes. */
8495
8496 static void
8497 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8498 comdat_type_node *type_node,
8499 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8500 {
8501 dw_attr_node *a;
8502 unsigned i;
8503
8504 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8505 {
8506 dw_loc_descr_ref loc;
8507
8508 if (a->dw_attr_val.val_class != dw_val_class_loc)
8509 continue;
8510
8511 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8512 {
8513 switch (loc->dw_loc_opc)
8514 {
8515 case DW_OP_call2:
8516 case DW_OP_call4:
8517 case DW_OP_call_ref:
8518 gcc_assert (loc->dw_loc_oprnd1.val_class
8519 == dw_val_class_die_ref);
8520 loc->dw_loc_oprnd1.v.val_die_ref.die
8521 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8522 type_node,
8523 copied_dwarf_procs);
8524
8525 default:
8526 break;
8527 }
8528 }
8529 }
8530 }
8531
8532 /* Copy DWARF procedures that are referenced by the DIE tree to TREE_NODE and
8533 rewrite references to point to the copies.
8534
8535 References are looked for in the location description attributes of DIE
8536 and, recursively, of all its children. COPIED_DWARF_PROCS maps old DWARF
8537 procedures to their copies; it is used to avoid copying the same DWARF
8538 procedure twice under TYPE_NODE. */
8539
8540 static void
8541 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8542 comdat_type_node *type_node,
8543 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8544 {
8545 dw_die_ref c;
8546
8547 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8548 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8549 type_node,
8550 copied_dwarf_procs));
8551 }
8552
8553 /* Traverse the DIE and set up additional .debug_types or .debug_info
8554 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8555 section. */
8556
8557 static void
8558 break_out_comdat_types (dw_die_ref die)
8559 {
8560 dw_die_ref c;
8561 dw_die_ref first;
8562 dw_die_ref prev = NULL;
8563 dw_die_ref next = NULL;
8564 dw_die_ref unit = NULL;
8565
8566 first = c = die->die_child;
8567 if (c)
8568 next = c->die_sib;
8569 if (c) do {
8570 if (prev == NULL || prev->die_sib == c)
8571 prev = c;
8572 c = next;
8573 next = (c == first ? NULL : c->die_sib);
8574 if (should_move_die_to_comdat (c))
8575 {
8576 dw_die_ref replacement;
8577 comdat_type_node *type_node;
8578
8579 /* Break out nested types into their own type units. */
8580 break_out_comdat_types (c);
8581
8582 /* Create a new type unit DIE as the root for the new tree. */
8583 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8584 add_AT_unsigned (unit, DW_AT_language,
8585 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8586
8587 /* Add the new unit's type DIE into the comdat type list. */
8588 type_node = ggc_cleared_alloc<comdat_type_node> ();
8589 type_node->root_die = unit;
8590 type_node->next = comdat_type_list;
8591 comdat_type_list = type_node;
8592
8593 /* Generate the type signature. */
8594 generate_type_signature (c, type_node);
8595
8596 /* Copy the declaration context, attributes, and children of the
8597 declaration into the new type unit DIE, then remove this DIE
8598 from the main CU (or replace it with a skeleton if necessary). */
8599 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8600 type_node->skeleton_die = replacement;
8601
8602 /* Add the DIE to the new compunit. */
8603 add_child_die (unit, c);
8604
8605 /* Types can reference DWARF procedures for type size or data location
8606 expressions. Calls in DWARF expressions cannot target procedures
8607 that are not in the same section. So we must copy DWARF procedures
8608 along with this type and then rewrite references to them. */
8609 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8610 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8611
8612 if (replacement != NULL)
8613 c = replacement;
8614 }
8615 else if (c->die_tag == DW_TAG_namespace
8616 || c->die_tag == DW_TAG_class_type
8617 || c->die_tag == DW_TAG_structure_type
8618 || c->die_tag == DW_TAG_union_type)
8619 {
8620 /* Look for nested types that can be broken out. */
8621 break_out_comdat_types (c);
8622 }
8623 } while (next != NULL);
8624 }
8625
8626 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8627 Enter all the cloned children into the hash table decl_table. */
8628
8629 static dw_die_ref
8630 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8631 {
8632 dw_die_ref c;
8633 dw_die_ref clone;
8634 struct decl_table_entry *entry;
8635 decl_table_entry **slot;
8636
8637 if (die->die_tag == DW_TAG_subprogram)
8638 clone = clone_as_declaration (die);
8639 else
8640 clone = clone_die (die);
8641
8642 slot = decl_table->find_slot_with_hash (die,
8643 htab_hash_pointer (die), INSERT);
8644
8645 /* Assert that DIE isn't in the hash table yet. If it were already
8646 there, its ancestors would necessarily be there as well, and
8647 clone_tree_partial wouldn't have been called. */
8648 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8649
8650 entry = XCNEW (struct decl_table_entry);
8651 entry->orig = die;
8652 entry->copy = clone;
8653 *slot = entry;
8654
8655 if (die->die_tag != DW_TAG_subprogram)
8656 FOR_EACH_CHILD (die, c,
8657 add_child_die (clone, clone_tree_partial (c, decl_table)));
8658
8659 return clone;
8660 }
8661
8662 /* Walk the DIE and its children, looking for references to incomplete
8663 or trivial types that are unmarked (i.e., that are not in the current
8664 type_unit). */
8665
8666 static void
8667 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8668 {
8669 dw_die_ref c;
8670 dw_attr_node *a;
8671 unsigned ix;
8672
8673 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8674 {
8675 if (AT_class (a) == dw_val_class_die_ref)
8676 {
8677 dw_die_ref targ = AT_ref (a);
8678 decl_table_entry **slot;
8679 struct decl_table_entry *entry;
8680
8681 if (targ->die_mark != 0 || targ->comdat_type_p)
8682 continue;
8683
8684 slot = decl_table->find_slot_with_hash (targ,
8685 htab_hash_pointer (targ),
8686 INSERT);
8687
8688 if (*slot != HTAB_EMPTY_ENTRY)
8689 {
8690 /* TARG has already been copied, so we just need to
8691 modify the reference to point to the copy. */
8692 entry = *slot;
8693 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8694 }
8695 else
8696 {
8697 dw_die_ref parent = unit;
8698 dw_die_ref copy = clone_die (targ);
8699
8700 /* Record in DECL_TABLE that TARG has been copied.
8701 Need to do this now, before the recursive call,
8702 because DECL_TABLE may be expanded and SLOT
8703 would no longer be a valid pointer. */
8704 entry = XCNEW (struct decl_table_entry);
8705 entry->orig = targ;
8706 entry->copy = copy;
8707 *slot = entry;
8708
8709 /* If TARG is not a declaration DIE, we need to copy its
8710 children. */
8711 if (!is_declaration_die (targ))
8712 {
8713 FOR_EACH_CHILD (
8714 targ, c,
8715 add_child_die (copy,
8716 clone_tree_partial (c, decl_table)));
8717 }
8718
8719 /* Make sure the cloned tree is marked as part of the
8720 type unit. */
8721 mark_dies (copy);
8722
8723 /* If TARG has surrounding context, copy its ancestor tree
8724 into the new type unit. */
8725 if (targ->die_parent != NULL
8726 && !is_unit_die (targ->die_parent))
8727 parent = copy_ancestor_tree (unit, targ->die_parent,
8728 decl_table);
8729
8730 add_child_die (parent, copy);
8731 a->dw_attr_val.v.val_die_ref.die = copy;
8732
8733 /* Make sure the newly-copied DIE is walked. If it was
8734 installed in a previously-added context, it won't
8735 get visited otherwise. */
8736 if (parent != unit)
8737 {
8738 /* Find the highest point of the newly-added tree,
8739 mark each node along the way, and walk from there. */
8740 parent->die_mark = 1;
8741 while (parent->die_parent
8742 && parent->die_parent->die_mark == 0)
8743 {
8744 parent = parent->die_parent;
8745 parent->die_mark = 1;
8746 }
8747 copy_decls_walk (unit, parent, decl_table);
8748 }
8749 }
8750 }
8751 }
8752
8753 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8754 }
8755
8756 /* Collect the skeleton DIEs in DIE that were already created by
8757 break_out_comdat_types, and record them in DECL_TABLE. */
8758
8759 static void
8760 collect_skeleton_dies (dw_die_ref die, decl_hash_type *decl_table)
8761 {
8762 dw_die_ref c;
8763
8764 if (dw_attr_node *a = get_AT (die, DW_AT_signature))
8765 {
8766 dw_die_ref targ = AT_ref (a);
8767 gcc_assert (targ->die_mark == 0 && targ->comdat_type_p);
8768 decl_table_entry **slot
8769 = decl_table->find_slot_with_hash (targ,
8770 htab_hash_pointer (targ),
8771 INSERT);
8772 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8773 /* Record in DECL_TABLE that TARG has been already copied
8774 by remove_child_or_replace_with_skeleton. */
8775 decl_table_entry *entry = XCNEW (struct decl_table_entry);
8776 entry->orig = targ;
8777 entry->copy = die;
8778 *slot = entry;
8779 }
8780 FOR_EACH_CHILD (die, c, collect_skeleton_dies (c, decl_table));
8781 }
8782
8783 /* Copy declarations for "unworthy" types into the new comdat section.
8784 Incomplete types, modified types, and certain other types aren't broken
8785 out into comdat sections of their own, so they don't have a signature,
8786 and we need to copy the declaration into the same section so that we
8787 don't have an external reference. */
8788
8789 static void
8790 copy_decls_for_unworthy_types (dw_die_ref unit)
8791 {
8792 mark_dies (unit);
8793 decl_hash_type decl_table (10);
8794 collect_skeleton_dies (unit, &decl_table);
8795 copy_decls_walk (unit, unit, &decl_table);
8796 unmark_dies (unit);
8797 }
8798
8799 /* Traverse the DIE and add a sibling attribute if it may have the
8800 effect of speeding up access to siblings. To save some space,
8801 avoid generating sibling attributes for DIEs without children. */
8802
8803 static void
8804 add_sibling_attributes (dw_die_ref die)
8805 {
8806 dw_die_ref c;
8807
8808 if (! die->die_child)
8809 return;
8810
8811 if (die->die_parent && die != die->die_parent->die_child)
8812 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8813
8814 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8815 }
8816
8817 /* Output all location lists for the DIE and its children. */
8818
8819 static void
8820 output_location_lists (dw_die_ref die)
8821 {
8822 dw_die_ref c;
8823 dw_attr_node *a;
8824 unsigned ix;
8825
8826 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8827 if (AT_class (a) == dw_val_class_loc_list)
8828 output_loc_list (AT_loc_list (a));
8829
8830 FOR_EACH_CHILD (die, c, output_location_lists (c));
8831 }
8832
8833 /* During assign_location_list_indexes and output_loclists_offsets this is
8834 the current index; afterwards it is the number of assigned indexes (i.e.
8835 how large the .debug_loclists* offset table should be). */
8836 static unsigned int loc_list_idx;
8837
8838 /* Output all location list offsets for the DIE and its children. */
8839
8840 static void
8841 output_loclists_offsets (dw_die_ref die)
8842 {
8843 dw_die_ref c;
8844 dw_attr_node *a;
8845 unsigned ix;
8846
8847 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8848 if (AT_class (a) == dw_val_class_loc_list)
8849 {
8850 dw_loc_list_ref l = AT_loc_list (a);
8851 if (l->offset_emitted)
8852 continue;
8853 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8854 loc_section_label, NULL);
8855 gcc_assert (l->hash == loc_list_idx);
8856 loc_list_idx++;
8857 l->offset_emitted = true;
8858 }
8859
8860 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8861 }
8862
8863 /* Recursively set indexes of location lists. */
8864
8865 static void
8866 assign_location_list_indexes (dw_die_ref die)
8867 {
8868 dw_die_ref c;
8869 dw_attr_node *a;
8870 unsigned ix;
8871
8872 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8873 if (AT_class (a) == dw_val_class_loc_list)
8874 {
8875 dw_loc_list_ref list = AT_loc_list (a);
8876 if (!list->num_assigned)
8877 {
8878 list->num_assigned = true;
8879 list->hash = loc_list_idx++;
8880 }
8881 }
8882
8883 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8884 }
8885
8886 /* We want to limit the number of external references, because they are
8887 larger than local references: a relocation takes multiple words, and
8888 even a sig8 reference is always eight bytes, whereas a local reference
8889 can be as small as one byte (though DW_FORM_ref is usually 4 in GCC).
8890 So if we encounter multiple external references to the same type DIE, we
8891 make a local typedef stub for it and redirect all references there.
8892
8893 This is the element of the hash table for keeping track of these
8894 references. */
8895
8896 struct external_ref
8897 {
8898 dw_die_ref type;
8899 dw_die_ref stub;
8900 unsigned n_refs;
8901 };
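
/* A rough worked example of the space trade-off (the counts are illustrative
   only): ten sig8 references to one comdat type cost 10 * 8 = 80 bytes,
   while one local typedef stub holding a single 8-byte DW_AT_signature plus
   ten 4-byte DW_FORM_ref4 references to that stub costs about
   8 + 10 * 4 = 48 bytes plus a few bytes of stub overhead, which is why a
   stub is built once n_refs exceeds one.  */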
8902
8903 /* Hashtable helpers. */
8904
8905 struct external_ref_hasher : free_ptr_hash <external_ref>
8906 {
8907 static inline hashval_t hash (const external_ref *);
8908 static inline bool equal (const external_ref *, const external_ref *);
8909 };
8910
8911 inline hashval_t
8912 external_ref_hasher::hash (const external_ref *r)
8913 {
8914 dw_die_ref die = r->type;
8915 hashval_t h = 0;
8916
8917 /* We can't use the address of the DIE for hashing, because
8918 that will make the order of the stub DIEs non-deterministic. */
8919 if (! die->comdat_type_p)
8920 /* We have a symbol; use it to compute a hash. */
8921 h = htab_hash_string (die->die_id.die_symbol);
8922 else
8923 {
8924 /* We have a type signature; use a subset of the bits as the hash.
8925 The 8-byte signature is at least as large as hashval_t. */
8926 comdat_type_node *type_node = die->die_id.die_type_node;
8927 memcpy (&h, type_node->signature, sizeof (h));
8928 }
8929 return h;
8930 }
8931
8932 inline bool
8933 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8934 {
8935 return r1->type == r2->type;
8936 }
8937
8938 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8939
8940 /* Return a pointer to the external_ref for references to DIE. */
8941
8942 static struct external_ref *
8943 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8944 {
8945 struct external_ref ref, *ref_p;
8946 external_ref **slot;
8947
8948 ref.type = die;
8949 slot = map->find_slot (&ref, INSERT);
8950 if (*slot != HTAB_EMPTY_ENTRY)
8951 return *slot;
8952
8953 ref_p = XCNEW (struct external_ref);
8954 ref_p->type = die;
8955 *slot = ref_p;
8956 return ref_p;
8957 }
8958
8959 /* Subroutine of optimize_external_refs, below.
8960
8961 If we see a type skeleton, record it as our stub. If we see external
8962 references, remember how many we've seen. */
8963
8964 static void
8965 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8966 {
8967 dw_die_ref c;
8968 dw_attr_node *a;
8969 unsigned ix;
8970 struct external_ref *ref_p;
8971
8972 if (is_type_die (die)
8973 && (c = get_AT_ref (die, DW_AT_signature)))
8974 {
8975 /* This is a local skeleton; use it for local references. */
8976 ref_p = lookup_external_ref (map, c);
8977 ref_p->stub = die;
8978 }
8979
8980 /* Scan the DIE references, and remember any that refer to DIEs from
8981 other CUs (i.e. those which are not marked). */
8982 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8983 if (AT_class (a) == dw_val_class_die_ref
8984 && (c = AT_ref (a))->die_mark == 0
8985 && is_type_die (c))
8986 {
8987 ref_p = lookup_external_ref (map, c);
8988 ref_p->n_refs++;
8989 }
8990
8991 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8992 }
8993
8994 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8995 points to an external_ref, DATA is the CU we're processing. If we don't
8996 already have a local stub, and we have multiple refs, build a stub. */
8997
8998 int
8999 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
9000 {
9001 struct external_ref *ref_p = *slot;
9002
9003 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
9004 {
9005 /* We have multiple references to this type, so build a small stub.
9006 Both of these forms are a bit dodgy from the perspective of the
9007 DWARF standard, since technically they should have names. */
9008 dw_die_ref cu = data;
9009 dw_die_ref type = ref_p->type;
9010 dw_die_ref stub = NULL;
9011
9012 if (type->comdat_type_p)
9013 {
9014 /* If we refer to this type via sig8, use AT_signature. */
9015 stub = new_die (type->die_tag, cu, NULL_TREE);
9016 add_AT_die_ref (stub, DW_AT_signature, type);
9017 }
9018 else
9019 {
9020 /* Otherwise, use a typedef with no name. */
9021 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
9022 add_AT_die_ref (stub, DW_AT_type, type);
9023 }
9024
9025 stub->die_mark++;
9026 ref_p->stub = stub;
9027 }
9028 return 1;
9029 }
9030
9031 /* DIE is a unit; look through all the DIE references to see if there are
9032 any external references to types, and if so, create local stubs for
9033 them which will be applied in build_abbrev_table. This is useful because
9034 references to local DIEs are smaller. */
9035
9036 static external_ref_hash_type *
9037 optimize_external_refs (dw_die_ref die)
9038 {
9039 external_ref_hash_type *map = new external_ref_hash_type (10);
9040 optimize_external_refs_1 (die, map);
9041 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
9042 return map;
9043 }
9044
9045 /* The following 4 variables are temporaries that are computed only during the
9046 build_abbrev_table call and used and released during the following
9047 optimize_abbrev_table call. */
9048
9049 /* First abbrev_id that can be optimized based on usage. */
9050 static unsigned int abbrev_opt_start;
9051
9052 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
9053 abbrev_id smaller than this, because they must be already sized
9054 during build_abbrev_table). */
9055 static unsigned int abbrev_opt_base_type_end;
9056
9057 /* Vector of usage counts during build_abbrev_table. Indexed by
9058 abbrev_id - abbrev_opt_start. */
9059 static vec<unsigned int> abbrev_usage_count;
9060
9061 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
9062 static vec<dw_die_ref> sorted_abbrev_dies;
9063
9064 /* The format of each DIE (and its attribute value pairs) is encoded in an
9065 abbreviation table. This routine builds the abbreviation table and assigns
9066 a unique abbreviation id for each abbreviation entry. The children of each
9067 die are visited recursively. */
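/* For instance, two DW_TAG_variable DIEs that both carry DW_AT_name with
DW_FORM_strp and DW_AT_type with DW_FORM_ref4 can share a single abbreviation
entry; a DIE whose tag, children flag, attributes or forms differ in any way
gets a new one. */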
9068
9069 static void
9070 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
9071 {
9072 unsigned int abbrev_id = 0;
9073 dw_die_ref c;
9074 dw_attr_node *a;
9075 unsigned ix;
9076 dw_die_ref abbrev;
9077
9078 /* Scan the DIE references, and replace any that refer to
9079 DIEs from other CUs (i.e. those which are not marked) with
9080 the local stubs we built in optimize_external_refs. */
9081 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9082 if (AT_class (a) == dw_val_class_die_ref
9083 && (c = AT_ref (a))->die_mark == 0)
9084 {
9085 struct external_ref *ref_p;
9086 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9087
9088 if (is_type_die (c)
9089 && (ref_p = lookup_external_ref (extern_map, c))
9090 && ref_p->stub && ref_p->stub != die)
9091 {
9092 gcc_assert (a->dw_attr != DW_AT_signature);
9093 change_AT_die_ref (a, ref_p->stub);
9094 }
9095 else
9096 /* We aren't changing this reference, so mark it external. */
9097 set_AT_ref_external (a, 1);
9098 }
9099
9100 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9101 {
9102 dw_attr_node *die_a, *abbrev_a;
9103 unsigned ix;
9104 bool ok = true;
9105
9106 if (abbrev_id == 0)
9107 continue;
9108 if (abbrev->die_tag != die->die_tag)
9109 continue;
9110 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9111 continue;
9112
9113 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9114 continue;
9115
9116 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9117 {
9118 abbrev_a = &(*abbrev->die_attr)[ix];
9119 if ((abbrev_a->dw_attr != die_a->dw_attr)
9120 || (value_format (abbrev_a) != value_format (die_a)))
9121 {
9122 ok = false;
9123 break;
9124 }
9125 }
9126 if (ok)
9127 break;
9128 }
9129
9130 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9131 {
9132 vec_safe_push (abbrev_die_table, die);
9133 if (abbrev_opt_start)
9134 abbrev_usage_count.safe_push (0);
9135 }
9136 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9137 {
9138 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9139 sorted_abbrev_dies.safe_push (die);
9140 }
9141
9142 die->die_abbrev = abbrev_id;
9143 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9144 }
9145
9146 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9147 by die_abbrev's usage count, from the most commonly used
9148 abbreviation to the least. */
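/* Abbrev codes are emitted as uleb128, so codes 1-127 take a single byte in
each DIE while larger codes take two or more bytes; putting the most
frequently used abbreviations first therefore keeps the hottest codes
smallest. */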
9149
9150 static int
9151 die_abbrev_cmp (const void *p1, const void *p2)
9152 {
9153 dw_die_ref die1 = *(const dw_die_ref *) p1;
9154 dw_die_ref die2 = *(const dw_die_ref *) p2;
9155
9156 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9157 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9158
9159 if (die1->die_abbrev >= abbrev_opt_base_type_end
9160 && die2->die_abbrev >= abbrev_opt_base_type_end)
9161 {
9162 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9163 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9164 return -1;
9165 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9166 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9167 return 1;
9168 }
9169
9170 /* Stabilize the sort. */
9171 if (die1->die_abbrev < die2->die_abbrev)
9172 return -1;
9173 if (die1->die_abbrev > die2->die_abbrev)
9174 return 1;
9175
9176 return 0;
9177 }
9178
9179 /* Convert dw_val_class_const and dw_val_class_unsigned_const class attributes
9180 of DIEs in between sorted_abbrev_dies[first_id] and sorted_abbrev_dies[end - 1]
9181 into dw_val_class_const_implicit,
9182 dw_val_class_unsigned_const_implicit or dw_val_class_file_implicit. */
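/* A rough example: if 1000 DIEs share an abbreviation whose DW_AT_decl_line
value is always 7, DW_FORM_implicit_const moves that constant into a single
sleb128 in .debug_abbrev and saves the 1000 one-byte copies that
DW_FORM_data1 would otherwise have emitted in .debug_info. */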
9183
9184 static void
9185 optimize_implicit_const (unsigned int first_id, unsigned int end,
9186 vec<bool> &implicit_consts)
9187 {
9188 /* It never makes sense if there is just one DIE using the abbreviation. */
9189 if (end < first_id + 2)
9190 return;
9191
9192 dw_attr_node *a;
9193 unsigned ix, i;
9194 dw_die_ref die = sorted_abbrev_dies[first_id];
9195 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9196 if (implicit_consts[ix])
9197 {
9198 enum dw_val_class new_class = dw_val_class_none;
9199 switch (AT_class (a))
9200 {
9201 case dw_val_class_unsigned_const:
9202 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9203 continue;
9204
9205 /* The .debug_abbrev section will grow by
9206 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9207 in all the DIEs using that abbreviation. */
9208 if (constant_size (AT_unsigned (a)) * (end - first_id)
9209 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9210 continue;
9211
9212 new_class = dw_val_class_unsigned_const_implicit;
9213 break;
9214
9215 case dw_val_class_const:
9216 new_class = dw_val_class_const_implicit;
9217 break;
9218
9219 case dw_val_class_file:
9220 new_class = dw_val_class_file_implicit;
9221 break;
9222
9223 default:
9224 continue;
9225 }
9226 for (i = first_id; i < end; i++)
9227 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9228 = new_class;
9229 }
9230 }
9231
9232 /* Attempt to optimize the abbreviation table, considering only abbreviations
9233 from abbrev_opt_start onwards. */
9234
9235 static void
9236 optimize_abbrev_table (void)
9237 {
9238 if (abbrev_opt_start
9239 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9240 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9241 {
9242 auto_vec<bool, 32> implicit_consts;
9243 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9244
9245 unsigned int abbrev_id = abbrev_opt_start - 1;
9246 unsigned int first_id = ~0U;
9247 unsigned int last_abbrev_id = 0;
9248 unsigned int i;
9249 dw_die_ref die;
9250 if (abbrev_opt_base_type_end > abbrev_opt_start)
9251 abbrev_id = abbrev_opt_base_type_end - 1;
9252 /* Reassign abbreviation ids from abbrev_opt_start above, so that
9253 most commonly used abbreviations come first. */
9254 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9255 {
9256 dw_attr_node *a;
9257 unsigned ix;
9258
9259 /* If calc_base_type_die_sizes has been called, the CU and
9260 base types after it can't be optimized, because we've already
9261 calculated their DIE offsets. We've sorted them first. */
9262 if (die->die_abbrev < abbrev_opt_base_type_end)
9263 continue;
9264 if (die->die_abbrev != last_abbrev_id)
9265 {
9266 last_abbrev_id = die->die_abbrev;
9267 if (dwarf_version >= 5 && first_id != ~0U)
9268 optimize_implicit_const (first_id, i, implicit_consts);
9269 abbrev_id++;
9270 (*abbrev_die_table)[abbrev_id] = die;
9271 if (dwarf_version >= 5)
9272 {
9273 first_id = i;
9274 implicit_consts.truncate (0);
9275
9276 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9277 switch (AT_class (a))
9278 {
9279 case dw_val_class_const:
9280 case dw_val_class_unsigned_const:
9281 case dw_val_class_file:
9282 implicit_consts.safe_push (true);
9283 break;
9284 default:
9285 implicit_consts.safe_push (false);
9286 break;
9287 }
9288 }
9289 }
9290 else if (dwarf_version >= 5)
9291 {
9292 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9293 if (!implicit_consts[ix])
9294 continue;
9295 else
9296 {
9297 dw_attr_node *other_a
9298 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9299 if (!dw_val_equal_p (&a->dw_attr_val,
9300 &other_a->dw_attr_val))
9301 implicit_consts[ix] = false;
9302 }
9303 }
9304 die->die_abbrev = abbrev_id;
9305 }
9306 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9307 if (dwarf_version >= 5 && first_id != ~0U)
9308 optimize_implicit_const (first_id, i, implicit_consts);
9309 }
9310
9311 abbrev_opt_start = 0;
9312 abbrev_opt_base_type_end = 0;
9313 abbrev_usage_count.release ();
9314 sorted_abbrev_dies.release ();
9315 }
9316 \f
9317 /* Return the power-of-two number of bytes necessary to represent VALUE. */
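/* Illustration: constant_size (0) through constant_size (255) yield 1,
256 through 65535 yield 2, values needing up to 32 bits yield 4, and
anything larger yields 8. */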
9318
9319 static int
9320 constant_size (unsigned HOST_WIDE_INT value)
9321 {
9322 int log;
9323
9324 if (value == 0)
9325 log = 0;
9326 else
9327 log = floor_log2 (value);
9328
9329 log = log / 8;
9330 log = 1 << (floor_log2 (log) + 1);
9331
9332 return log;
9333 }
9334
9335 /* Return the size of a DIE as it is represented in the
9336 .debug_info section. */
9337
9338 static unsigned long
9339 size_of_die (dw_die_ref die)
9340 {
9341 unsigned long size = 0;
9342 dw_attr_node *a;
9343 unsigned ix;
9344 enum dwarf_form form;
9345
9346 size += size_of_uleb128 (die->die_abbrev);
9347 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9348 {
9349 switch (AT_class (a))
9350 {
9351 case dw_val_class_addr:
9352 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9353 {
9354 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9355 size += size_of_uleb128 (AT_index (a));
9356 }
9357 else
9358 size += DWARF2_ADDR_SIZE;
9359 break;
9360 case dw_val_class_offset:
9361 size += DWARF_OFFSET_SIZE;
9362 break;
9363 case dw_val_class_loc:
9364 {
9365 unsigned long lsize = size_of_locs (AT_loc (a));
9366
9367 /* Block length. */
9368 if (dwarf_version >= 4)
9369 size += size_of_uleb128 (lsize);
9370 else
9371 size += constant_size (lsize);
9372 size += lsize;
9373 }
9374 break;
9375 case dw_val_class_loc_list:
9376 if (dwarf_split_debug_info && dwarf_version >= 5)
9377 {
9378 gcc_assert (AT_loc_list (a)->num_assigned);
9379 size += size_of_uleb128 (AT_loc_list (a)->hash);
9380 }
9381 else
9382 size += DWARF_OFFSET_SIZE;
9383 break;
9384 case dw_val_class_view_list:
9385 size += DWARF_OFFSET_SIZE;
9386 break;
9387 case dw_val_class_range_list:
9388 if (value_format (a) == DW_FORM_rnglistx)
9389 {
9390 gcc_assert (rnglist_idx);
9391 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9392 size += size_of_uleb128 (r->idx);
9393 }
9394 else
9395 size += DWARF_OFFSET_SIZE;
9396 break;
9397 case dw_val_class_const:
9398 size += size_of_sleb128 (AT_int (a));
9399 break;
9400 case dw_val_class_unsigned_const:
9401 {
9402 int csize = constant_size (AT_unsigned (a));
9403 if (dwarf_version == 3
9404 && a->dw_attr == DW_AT_data_member_location
9405 && csize >= 4)
9406 size += size_of_uleb128 (AT_unsigned (a));
9407 else
9408 size += csize;
9409 }
9410 break;
9411 case dw_val_class_symview:
9412 if (symview_upper_bound <= 0xff)
9413 size += 1;
9414 else if (symview_upper_bound <= 0xffff)
9415 size += 2;
9416 else if (symview_upper_bound <= 0xffffffff)
9417 size += 4;
9418 else
9419 size += 8;
9420 break;
9421 case dw_val_class_const_implicit:
9422 case dw_val_class_unsigned_const_implicit:
9423 case dw_val_class_file_implicit:
9424 /* These occupy no size in the DIE, just an extra sleb128 in
9425 .debug_abbrev. */
9426 break;
9427 case dw_val_class_const_double:
9428 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9429 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9430 size++; /* block */
9431 break;
9432 case dw_val_class_wide_int:
9433 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9434 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9435 if (get_full_len (*a->dw_attr_val.v.val_wide)
9436 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9437 size++; /* block */
9438 break;
9439 case dw_val_class_vec:
9440 size += constant_size (a->dw_attr_val.v.val_vec.length
9441 * a->dw_attr_val.v.val_vec.elt_size)
9442 + a->dw_attr_val.v.val_vec.length
9443 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9444 break;
9445 case dw_val_class_flag:
9446 if (dwarf_version >= 4)
9447 /* Currently all add_AT_flag calls pass in 1 as last argument,
9448 so DW_FORM_flag_present can be used. If that ever changes,
9449 we'll need to use DW_FORM_flag and have some optimization
9450 in build_abbrev_table that will change those to
9451 DW_FORM_flag_present if it is set to 1 in all DIEs using
9452 the same abbrev entry. */
9453 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9454 else
9455 size += 1;
9456 break;
9457 case dw_val_class_die_ref:
9458 if (AT_ref_external (a))
9459 {
9460 /* References to comdat type units (DWARF4 and later) use DW_FORM_ref_sig8;
9461 other external references use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9462 is sized by target address length, whereas in DWARF3 and later
9463 it's always sized as an offset. */
9464 if (AT_ref (a)->comdat_type_p)
9465 size += DWARF_TYPE_SIGNATURE_SIZE;
9466 else if (dwarf_version == 2)
9467 size += DWARF2_ADDR_SIZE;
9468 else
9469 size += DWARF_OFFSET_SIZE;
9470 }
9471 else
9472 size += DWARF_OFFSET_SIZE;
9473 break;
9474 case dw_val_class_fde_ref:
9475 size += DWARF_OFFSET_SIZE;
9476 break;
9477 case dw_val_class_lbl_id:
9478 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9479 {
9480 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9481 size += size_of_uleb128 (AT_index (a));
9482 }
9483 else
9484 size += DWARF2_ADDR_SIZE;
9485 break;
9486 case dw_val_class_lineptr:
9487 case dw_val_class_macptr:
9488 case dw_val_class_loclistsptr:
9489 size += DWARF_OFFSET_SIZE;
9490 break;
9491 case dw_val_class_str:
9492 form = AT_string_form (a);
9493 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9494 size += DWARF_OFFSET_SIZE;
9495 else if (form == dwarf_FORM (DW_FORM_strx))
9496 size += size_of_uleb128 (AT_index (a));
9497 else
9498 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9499 break;
9500 case dw_val_class_file:
9501 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9502 break;
9503 case dw_val_class_data8:
9504 size += 8;
9505 break;
9506 case dw_val_class_vms_delta:
9507 size += DWARF_OFFSET_SIZE;
9508 break;
9509 case dw_val_class_high_pc:
9510 size += DWARF2_ADDR_SIZE;
9511 break;
9512 case dw_val_class_discr_value:
9513 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9514 break;
9515 case dw_val_class_discr_list:
9516 {
9517 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9518
9519 /* This is a block, so we have the block length and then its
9520 data. */
9521 size += constant_size (block_size) + block_size;
9522 }
9523 break;
9524 default:
9525 gcc_unreachable ();
9526 }
9527 }
9528
9529 return size;
9530 }
9531
9532 /* Size the debugging information associated with a given DIE. Visits the
9533 DIE's children recursively. Updates the global variable next_die_offset on
9534 each visit. Uses the current value of next_die_offset to update the
9535 die_offset field in each DIE. */
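/* For example, if a DIE is placed at offset 100 and size_of_die returns 23
for it (one byte of uleb128 abbrev code plus 22 bytes of attribute values),
its first child lands at offset 123; a sibling chain additionally costs one
terminating null byte, counted below. */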
9536
9537 static void
9538 calc_die_sizes (dw_die_ref die)
9539 {
9540 dw_die_ref c;
9541
9542 gcc_assert (die->die_offset == 0
9543 || (unsigned long int) die->die_offset == next_die_offset);
9544 die->die_offset = next_die_offset;
9545 next_die_offset += size_of_die (die);
9546
9547 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9548
9549 if (die->die_child != NULL)
9550 /* Count the null byte used to terminate sibling lists. */
9551 next_die_offset += 1;
9552 }
9553
9554 /* Size just the base type children at the start of the CU.
9555 This is needed because build_abbrev_table needs to size location
9556 expressions, and sizing of type-based stack ops needs to know die_offset
9557 values for the base types. */
9558
9559 static void
9560 calc_base_type_die_sizes (void)
9561 {
9562 unsigned long die_offset = (dwarf_split_debug_info
9563 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9564 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9565 unsigned int i;
9566 dw_die_ref base_type;
9567 #if ENABLE_ASSERT_CHECKING
9568 dw_die_ref prev = comp_unit_die ()->die_child;
9569 #endif
9570
9571 die_offset += size_of_die (comp_unit_die ());
9572 for (i = 0; base_types.iterate (i, &base_type); i++)
9573 {
9574 #if ENABLE_ASSERT_CHECKING
9575 gcc_assert (base_type->die_offset == 0
9576 && prev->die_sib == base_type
9577 && base_type->die_child == NULL
9578 && base_type->die_abbrev);
9579 prev = base_type;
9580 #endif
9581 if (abbrev_opt_start
9582 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9583 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9584 base_type->die_offset = die_offset;
9585 die_offset += size_of_die (base_type);
9586 }
9587 }
9588
9589 /* Set the marks for a die and its children. We do this so
9590 that we know whether or not a reference needs to use FORM_ref_addr; only
9591 DIEs in the same CU will be marked. We used to clear out the offset
9592 and use that as the flag, but ran into ordering problems. */
9593
9594 static void
9595 mark_dies (dw_die_ref die)
9596 {
9597 dw_die_ref c;
9598
9599 gcc_assert (!die->die_mark);
9600
9601 die->die_mark = 1;
9602 FOR_EACH_CHILD (die, c, mark_dies (c));
9603 }
9604
9605 /* Clear the marks for a die and its children. */
9606
9607 static void
9608 unmark_dies (dw_die_ref die)
9609 {
9610 dw_die_ref c;
9611
9612 if (! use_debug_types)
9613 gcc_assert (die->die_mark);
9614
9615 die->die_mark = 0;
9616 FOR_EACH_CHILD (die, c, unmark_dies (c));
9617 }
9618
9619 /* Clear the marks for a die, its children and referred dies. */
9620
9621 static void
9622 unmark_all_dies (dw_die_ref die)
9623 {
9624 dw_die_ref c;
9625 dw_attr_node *a;
9626 unsigned ix;
9627
9628 if (!die->die_mark)
9629 return;
9630 die->die_mark = 0;
9631
9632 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9633
9634 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9635 if (AT_class (a) == dw_val_class_die_ref)
9636 unmark_all_dies (AT_ref (a));
9637 }
9638
9639 /* Calculate if the entry should appear in the final output file. It may be
9640 from a pruned type. */
9641
9642 static bool
9643 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9644 {
9645 /* By limiting gnu pubnames to definitions only, gold can generate a
9646 gdb index without entries for declarations, which don't include
9647 enough information to be useful. */
9648 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9649 return false;
9650
9651 if (table == pubname_table)
9652 {
9653 /* Enumerator names are part of the pubname table, but the
9654 parent DW_TAG_enumeration_type die may have been pruned.
9655 Don't output them if that is the case. */
9656 if (p->die->die_tag == DW_TAG_enumerator &&
9657 (p->die->die_parent == NULL
9658 || !p->die->die_parent->die_perennial_p))
9659 return false;
9660
9661 /* Everything else in the pubname table is included. */
9662 return true;
9663 }
9664
9665 /* The pubtypes table shouldn't include types that have been
9666 pruned. */
9667 return (p->die->die_offset != 0
9668 || !flag_eliminate_unused_debug_types);
9669 }
9670
9671 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9672 generated for the compilation unit. */
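/* As a sketch of the layout being sized: each included entry is a
DWARF_OFFSET_SIZE-byte DIE offset, the NUL-terminated name, and one extra
flags byte when debug_generate_pub_sections == 2 (GNU pubnames); the final
DWARF_OFFSET_SIZE added below is the terminating zero offset. */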
9673
9674 static unsigned long
9675 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9676 {
9677 unsigned long size;
9678 unsigned i;
9679 pubname_entry *p;
9680 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9681
9682 size = DWARF_PUBNAMES_HEADER_SIZE;
9683 FOR_EACH_VEC_ELT (*names, i, p)
9684 if (include_pubname_in_output (names, p))
9685 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9686
9687 size += DWARF_OFFSET_SIZE;
9688 return size;
9689 }
9690
9691 /* Return the size of the information in the .debug_aranges section. */
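/* Sketch of what is being sized: the fixed header, then one address/length
pair (2 * DWARF2_ADDR_SIZE bytes) per contiguous code range, and finally a
zero address/length pair terminating the table. */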
9692
9693 static unsigned long
9694 size_of_aranges (void)
9695 {
9696 unsigned long size;
9697
9698 size = DWARF_ARANGES_HEADER_SIZE;
9699
9700 /* Count the address/length pair for this compilation unit. */
9701 if (text_section_used)
9702 size += 2 * DWARF2_ADDR_SIZE;
9703 if (cold_text_section_used)
9704 size += 2 * DWARF2_ADDR_SIZE;
9705 if (have_multiple_function_sections)
9706 {
9707 unsigned fde_idx;
9708 dw_fde_ref fde;
9709
9710 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9711 {
9712 if (DECL_IGNORED_P (fde->decl))
9713 continue;
9714 if (!fde->in_std_section)
9715 size += 2 * DWARF2_ADDR_SIZE;
9716 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9717 size += 2 * DWARF2_ADDR_SIZE;
9718 }
9719 }
9720
9721 /* Count the two zero words used to terminate the address range table. */
9722 size += 2 * DWARF2_ADDR_SIZE;
9723 return size;
9724 }
9725 \f
9726 /* Select the encoding of an attribute value. */
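/* For example, an unsigned constant of 300 has constant_size 2 and is
encoded as DW_FORM_data2, while a signed constant always uses the
variable-length DW_FORM_sdata; see the corresponding cases below. */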
9727
9728 static enum dwarf_form
9729 value_format (dw_attr_node *a)
9730 {
9731 switch (AT_class (a))
9732 {
9733 case dw_val_class_addr:
9734 /* Only very few attributes allow DW_FORM_addr. */
9735 switch (a->dw_attr)
9736 {
9737 case DW_AT_low_pc:
9738 case DW_AT_high_pc:
9739 case DW_AT_entry_pc:
9740 case DW_AT_trampoline:
9741 return (AT_index (a) == NOT_INDEXED
9742 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9743 default:
9744 break;
9745 }
9746 switch (DWARF2_ADDR_SIZE)
9747 {
9748 case 1:
9749 return DW_FORM_data1;
9750 case 2:
9751 return DW_FORM_data2;
9752 case 4:
9753 return DW_FORM_data4;
9754 case 8:
9755 return DW_FORM_data8;
9756 default:
9757 gcc_unreachable ();
9758 }
9759 case dw_val_class_loc_list:
9760 if (dwarf_split_debug_info
9761 && dwarf_version >= 5
9762 && AT_loc_list (a)->num_assigned)
9763 return DW_FORM_loclistx;
9764 /* FALLTHRU */
9765 case dw_val_class_view_list:
9766 case dw_val_class_range_list:
9767 /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo
9768 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9769 care about sizes of .debug* sections in shared libraries and
9770 executables and don't take into account relocations that affect just
9771 relocatable objects - for DW_FORM_rnglistx we'd have to emit offset
9772 table in the .debug_rnglists section. */
9773 if (dwarf_split_debug_info
9774 && dwarf_version >= 5
9775 && AT_class (a) == dw_val_class_range_list
9776 && rnglist_idx
9777 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9778 return DW_FORM_rnglistx;
9779 if (dwarf_version >= 4)
9780 return DW_FORM_sec_offset;
9781 /* FALLTHRU */
9782 case dw_val_class_vms_delta:
9783 case dw_val_class_offset:
9784 switch (DWARF_OFFSET_SIZE)
9785 {
9786 case 4:
9787 return DW_FORM_data4;
9788 case 8:
9789 return DW_FORM_data8;
9790 default:
9791 gcc_unreachable ();
9792 }
9793 case dw_val_class_loc:
9794 if (dwarf_version >= 4)
9795 return DW_FORM_exprloc;
9796 switch (constant_size (size_of_locs (AT_loc (a))))
9797 {
9798 case 1:
9799 return DW_FORM_block1;
9800 case 2:
9801 return DW_FORM_block2;
9802 case 4:
9803 return DW_FORM_block4;
9804 default:
9805 gcc_unreachable ();
9806 }
9807 case dw_val_class_const:
9808 return DW_FORM_sdata;
9809 case dw_val_class_unsigned_const:
9810 switch (constant_size (AT_unsigned (a)))
9811 {
9812 case 1:
9813 return DW_FORM_data1;
9814 case 2:
9815 return DW_FORM_data2;
9816 case 4:
9817 /* In DWARF3 DW_AT_data_member_location with
9818 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9819 constant, so we need to use DW_FORM_udata if we need
9820 a large constant. */
9821 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9822 return DW_FORM_udata;
9823 return DW_FORM_data4;
9824 case 8:
9825 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9826 return DW_FORM_udata;
9827 return DW_FORM_data8;
9828 default:
9829 gcc_unreachable ();
9830 }
9831 case dw_val_class_const_implicit:
9832 case dw_val_class_unsigned_const_implicit:
9833 case dw_val_class_file_implicit:
9834 return DW_FORM_implicit_const;
9835 case dw_val_class_const_double:
9836 switch (HOST_BITS_PER_WIDE_INT)
9837 {
9838 case 8:
9839 return DW_FORM_data2;
9840 case 16:
9841 return DW_FORM_data4;
9842 case 32:
9843 return DW_FORM_data8;
9844 case 64:
9845 if (dwarf_version >= 5)
9846 return DW_FORM_data16;
9847 /* FALLTHRU */
9848 default:
9849 return DW_FORM_block1;
9850 }
9851 case dw_val_class_wide_int:
9852 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9853 {
9854 case 8:
9855 return DW_FORM_data1;
9856 case 16:
9857 return DW_FORM_data2;
9858 case 32:
9859 return DW_FORM_data4;
9860 case 64:
9861 return DW_FORM_data8;
9862 case 128:
9863 if (dwarf_version >= 5)
9864 return DW_FORM_data16;
9865 /* FALLTHRU */
9866 default:
9867 return DW_FORM_block1;
9868 }
9869 case dw_val_class_symview:
9870 /* ??? We might use uleb128, but then we'd have to compute
9871 .debug_info offsets in the assembler. */
9872 if (symview_upper_bound <= 0xff)
9873 return DW_FORM_data1;
9874 else if (symview_upper_bound <= 0xffff)
9875 return DW_FORM_data2;
9876 else if (symview_upper_bound <= 0xffffffff)
9877 return DW_FORM_data4;
9878 else
9879 return DW_FORM_data8;
9880 case dw_val_class_vec:
9881 switch (constant_size (a->dw_attr_val.v.val_vec.length
9882 * a->dw_attr_val.v.val_vec.elt_size))
9883 {
9884 case 1:
9885 return DW_FORM_block1;
9886 case 2:
9887 return DW_FORM_block2;
9888 case 4:
9889 return DW_FORM_block4;
9890 default:
9891 gcc_unreachable ();
9892 }
9893 case dw_val_class_flag:
9894 if (dwarf_version >= 4)
9895 {
9896 /* Currently all add_AT_flag calls pass in 1 as last argument,
9897 so DW_FORM_flag_present can be used. If that ever changes,
9898 we'll need to use DW_FORM_flag and have some optimization
9899 in build_abbrev_table that will change those to
9900 DW_FORM_flag_present if it is set to 1 in all DIEs using
9901 the same abbrev entry. */
9902 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9903 return DW_FORM_flag_present;
9904 }
9905 return DW_FORM_flag;
9906 case dw_val_class_die_ref:
9907 if (AT_ref_external (a))
9908 {
9909 if (AT_ref (a)->comdat_type_p)
9910 return DW_FORM_ref_sig8;
9911 else
9912 return DW_FORM_ref_addr;
9913 }
9914 else
9915 return DW_FORM_ref;
9916 case dw_val_class_fde_ref:
9917 return DW_FORM_data;
9918 case dw_val_class_lbl_id:
9919 return (AT_index (a) == NOT_INDEXED
9920 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9921 case dw_val_class_lineptr:
9922 case dw_val_class_macptr:
9923 case dw_val_class_loclistsptr:
9924 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9925 case dw_val_class_str:
9926 return AT_string_form (a);
9927 case dw_val_class_file:
9928 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9929 {
9930 case 1:
9931 return DW_FORM_data1;
9932 case 2:
9933 return DW_FORM_data2;
9934 case 4:
9935 return DW_FORM_data4;
9936 default:
9937 gcc_unreachable ();
9938 }
9939
9940 case dw_val_class_data8:
9941 return DW_FORM_data8;
9942
9943 case dw_val_class_high_pc:
9944 switch (DWARF2_ADDR_SIZE)
9945 {
9946 case 1:
9947 return DW_FORM_data1;
9948 case 2:
9949 return DW_FORM_data2;
9950 case 4:
9951 return DW_FORM_data4;
9952 case 8:
9953 return DW_FORM_data8;
9954 default:
9955 gcc_unreachable ();
9956 }
9957
9958 case dw_val_class_discr_value:
9959 return (a->dw_attr_val.v.val_discr_value.pos
9960 ? DW_FORM_udata
9961 : DW_FORM_sdata);
9962 case dw_val_class_discr_list:
9963 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9964 {
9965 case 1:
9966 return DW_FORM_block1;
9967 case 2:
9968 return DW_FORM_block2;
9969 case 4:
9970 return DW_FORM_block4;
9971 default:
9972 gcc_unreachable ();
9973 }
9974
9975 default:
9976 gcc_unreachable ();
9977 }
9978 }
9979
9980 /* Output the encoding of an attribute value. */
9981
9982 static void
9983 output_value_format (dw_attr_node *a)
9984 {
9985 enum dwarf_form form = value_format (a);
9986
9987 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9988 }
9989
9990 /* Given a die and id, produce the appropriate abbreviations. */
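/* The emitted layout is: a uleb128 abbrev code, a uleb128 tag, one byte for
DW_children_yes/no, a uleb128 (attribute, form) pair per attribute (plus an
extra sleb128 value for DW_FORM_implicit_const), and a terminating (0, 0)
pair. */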
9991
9992 static void
9993 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9994 {
9995 unsigned ix;
9996 dw_attr_node *a_attr;
9997
9998 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9999 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
10000 dwarf_tag_name (abbrev->die_tag));
10001
10002 if (abbrev->die_child != NULL)
10003 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
10004 else
10005 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
10006
10007 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
10008 {
10009 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
10010 dwarf_attr_name (a_attr->dw_attr));
10011 output_value_format (a_attr);
10012 if (value_format (a_attr) == DW_FORM_implicit_const)
10013 {
10014 if (AT_class (a_attr) == dw_val_class_file_implicit)
10015 {
10016 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
10017 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
10018 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
10019 }
10020 else
10021 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
10022 }
10023 }
10024
10025 dw2_asm_output_data (1, 0, NULL);
10026 dw2_asm_output_data (1, 0, NULL);
10027 }
10028
10029
10030 /* Output the .debug_abbrev section which defines the DIE abbreviation
10031 table. */
10032
10033 static void
10034 output_abbrev_section (void)
10035 {
10036 unsigned int abbrev_id;
10037 dw_die_ref abbrev;
10038
10039 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
10040 if (abbrev_id != 0)
10041 output_die_abbrevs (abbrev_id, abbrev);
10042
10043 /* Terminate the table. */
10044 dw2_asm_output_data (1, 0, NULL);
10045 }
10046
10047 /* Return a new location list, given the begin and end range, and the
10048 expression. */
10049
10050 static inline dw_loc_list_ref
10051 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
10052 const char *end, var_loc_view vend,
10053 const char *section)
10054 {
10055 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
10056
10057 retlist->begin = begin;
10058 retlist->begin_entry = NULL;
10059 retlist->end = end;
10060 retlist->expr = expr;
10061 retlist->section = section;
10062 retlist->vbegin = vbegin;
10063 retlist->vend = vend;
10064
10065 return retlist;
10066 }
10067
10068 /* Return true iff there's any nonzero view number in the loc list.
10069
10070 ??? When views are not enabled, we'll often extend a single range
10071 to the entire function, so that we emit a single location
10072 expression rather than a location list. With views, even with a
10073 single range, we'll output a list if start or end have a nonzero
10074 view. If we change this, we may want to stop splitting a single
10075 range in dw_loc_list just because of a nonzero view, even if it
10076 straddles across hot/cold partitions. */
10077
10078 static bool
10079 loc_list_has_views (dw_loc_list_ref list)
10080 {
10081 if (!debug_variable_location_views)
10082 return false;
10083
10084 for (dw_loc_list_ref loc = list;
10085 loc != NULL; loc = loc->dw_loc_next)
10086 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
10087 return true;
10088
10089 return false;
10090 }
10091
10092 /* Generate a new internal symbol for this location list node, if it
10093 hasn't got one yet. */
10094
10095 static inline void
10096 gen_llsym (dw_loc_list_ref list)
10097 {
10098 gcc_assert (!list->ll_symbol);
10099 list->ll_symbol = gen_internal_sym ("LLST");
10100
10101 if (!loc_list_has_views (list))
10102 return;
10103
10104 if (dwarf2out_locviews_in_attribute ())
10105 {
10106 /* Use the same label_num for the view list. */
10107 label_num--;
10108 list->vl_symbol = gen_internal_sym ("LVUS");
10109 }
10110 else
10111 list->vl_symbol = list->ll_symbol;
10112 }
10113
10114 /* Generate a symbol for the list, but only if we really want to emit
10115 it as a list. */
10116
10117 static inline void
10118 maybe_gen_llsym (dw_loc_list_ref list)
10119 {
10120 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10121 return;
10122
10123 gen_llsym (list);
10124 }
10125
10126 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10127 NULL, don't consider size of the location expression. If we're not
10128 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10129 representation in *SIZEP. */
10130
10131 static bool
10132 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10133 {
10134 /* Don't output an entry that starts and ends at the same address. */
10135 if (strcmp (curr->begin, curr->end) == 0
10136 && curr->vbegin == curr->vend && !curr->force)
10137 return true;
10138
10139 if (!sizep)
10140 return false;
10141
10142 unsigned long size = size_of_locs (curr->expr);
10143
10144 /* If the expression is too large, drop it on the floor. We could
10145 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10146 in the expression, but >= 64KB expressions for a single value
10147 in a single range are unlikely to be very useful. */
10148 if (dwarf_version < 5 && size > 0xffff)
10149 return true;
10150
10151 *sizep = size;
10152
10153 return false;
10154 }
10155
10156 /* Output a view pair loclist entry for CURR, if it requires one. */
10157
10158 static void
10159 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10160 {
10161 if (!dwarf2out_locviews_in_loclist ())
10162 return;
10163
10164 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10165 return;
10166
10167 #ifdef DW_LLE_view_pair
10168 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10169
10170 if (dwarf2out_as_locview_support)
10171 {
10172 if (ZERO_VIEW_P (curr->vbegin))
10173 dw2_asm_output_data_uleb128 (0, "Location view begin");
10174 else
10175 {
10176 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10177 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10178 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10179 }
10180
10181 if (ZERO_VIEW_P (curr->vend))
10182 dw2_asm_output_data_uleb128 (0, "Location view end");
10183 else
10184 {
10185 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10186 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10187 dw2_asm_output_symname_uleb128 (label, "Location view end");
10188 }
10189 }
10190 else
10191 {
10192 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10193 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10194 }
10195 #endif /* DW_LLE_view_pair */
10196
10197 return;
10198 }
10199
10200 /* Output the location list given to us. */
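/* Rough shape of a single DWARF 5 entry emitted below: a one-byte DW_LLE_*
kind, its operands (uleb128 offsets, uleb128 .debug_addr indexes, or absolute
addresses, depending on the kind), then a uleb128 expression length followed
by the DWARF expression bytes. For DWARF < 5 each entry is an address pair
followed by a 2-byte expression length and the expression. */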
10201
10202 static void
10203 output_loc_list (dw_loc_list_ref list_head)
10204 {
10205 int vcount = 0, lcount = 0;
10206
10207 if (list_head->emitted)
10208 return;
10209 list_head->emitted = true;
10210
10211 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10212 {
10213 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10214
10215 for (dw_loc_list_ref curr = list_head; curr != NULL;
10216 curr = curr->dw_loc_next)
10217 {
10218 unsigned long size;
10219
10220 if (skip_loc_list_entry (curr, &size))
10221 continue;
10222
10223 vcount++;
10224
10225 /* ?? dwarf_split_debug_info? */
10226 if (dwarf2out_as_locview_support)
10227 {
10228 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10229
10230 if (!ZERO_VIEW_P (curr->vbegin))
10231 {
10232 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10233 dw2_asm_output_symname_uleb128 (label,
10234 "View list begin (%s)",
10235 list_head->vl_symbol);
10236 }
10237 else
10238 dw2_asm_output_data_uleb128 (0,
10239 "View list begin (%s)",
10240 list_head->vl_symbol);
10241
10242 if (!ZERO_VIEW_P (curr->vend))
10243 {
10244 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10245 dw2_asm_output_symname_uleb128 (label,
10246 "View list end (%s)",
10247 list_head->vl_symbol);
10248 }
10249 else
10250 dw2_asm_output_data_uleb128 (0,
10251 "View list end (%s)",
10252 list_head->vl_symbol);
10253 }
10254 else
10255 {
10256 dw2_asm_output_data_uleb128 (curr->vbegin,
10257 "View list begin (%s)",
10258 list_head->vl_symbol);
10259 dw2_asm_output_data_uleb128 (curr->vend,
10260 "View list end (%s)",
10261 list_head->vl_symbol);
10262 }
10263 }
10264 }
10265
10266 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10267
10268 const char *last_section = NULL;
10269 const char *base_label = NULL;
10270
10271 /* Walk the location list, and output each range + expression. */
10272 for (dw_loc_list_ref curr = list_head; curr != NULL;
10273 curr = curr->dw_loc_next)
10274 {
10275 unsigned long size;
10276
10277 /* Skip this entry? If we skip it here, we must skip it in the
10278 view list above as well. */
10279 if (skip_loc_list_entry (curr, &size))
10280 continue;
10281
10282 lcount++;
10283
10284 if (dwarf_version >= 5)
10285 {
10286 if (dwarf_split_debug_info)
10287 {
10288 dwarf2out_maybe_output_loclist_view_pair (curr);
10289 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
10290 uleb128 index into .debug_addr and uleb128 length. */
10291 dw2_asm_output_data (1, DW_LLE_startx_length,
10292 "DW_LLE_startx_length (%s)",
10293 list_head->ll_symbol);
10294 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10295 "Location list range start index "
10296 "(%s)", curr->begin);
10297 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10298 For that case we probably need to emit DW_LLE_startx_endx,
10299 but we'd need 2 .debug_addr entries rather than just one. */
10300 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10301 "Location list length (%s)",
10302 list_head->ll_symbol);
10303 }
10304 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10305 {
10306 dwarf2out_maybe_output_loclist_view_pair (curr);
10307 /* If all code is in .text section, the base address is
10308 already provided by the CU attributes. Use
10309 DW_LLE_offset_pair where both addresses are uleb128 encoded
10310 offsets against that base. */
10311 dw2_asm_output_data (1, DW_LLE_offset_pair,
10312 "DW_LLE_offset_pair (%s)",
10313 list_head->ll_symbol);
10314 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10315 "Location list begin address (%s)",
10316 list_head->ll_symbol);
10317 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10318 "Location list end address (%s)",
10319 list_head->ll_symbol);
10320 }
10321 else if (HAVE_AS_LEB128)
10322 {
10323 /* Otherwise, find out how many consecutive entries could share
10324 the same base entry. If just one, emit DW_LLE_start_length,
10325 otherwise emit DW_LLE_base_address for the base address
10326 followed by a series of DW_LLE_offset_pair. */
10327 if (last_section == NULL || curr->section != last_section)
10328 {
10329 dw_loc_list_ref curr2;
10330 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10331 curr2 = curr2->dw_loc_next)
10332 {
10333 if (strcmp (curr2->begin, curr2->end) == 0
10334 && !curr2->force)
10335 continue;
10336 break;
10337 }
10338 if (curr2 == NULL || curr->section != curr2->section)
10339 last_section = NULL;
10340 else
10341 {
10342 last_section = curr->section;
10343 base_label = curr->begin;
10344 dw2_asm_output_data (1, DW_LLE_base_address,
10345 "DW_LLE_base_address (%s)",
10346 list_head->ll_symbol);
10347 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10348 "Base address (%s)",
10349 list_head->ll_symbol);
10350 }
10351 }
10352 /* Only one entry with the same base address. Use
10353 DW_LLE_start_length with absolute address and uleb128
10354 length. */
10355 if (last_section == NULL)
10356 {
10357 dwarf2out_maybe_output_loclist_view_pair (curr);
10358 dw2_asm_output_data (1, DW_LLE_start_length,
10359 "DW_LLE_start_length (%s)",
10360 list_head->ll_symbol);
10361 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10362 "Location list begin address (%s)",
10363 list_head->ll_symbol);
10364 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10365 "Location list length "
10366 "(%s)", list_head->ll_symbol);
10367 }
10368 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10369 DW_LLE_base_address. */
10370 else
10371 {
10372 dwarf2out_maybe_output_loclist_view_pair (curr);
10373 dw2_asm_output_data (1, DW_LLE_offset_pair,
10374 "DW_LLE_offset_pair (%s)",
10375 list_head->ll_symbol);
10376 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10377 "Location list begin address "
10378 "(%s)", list_head->ll_symbol);
10379 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10380 "Location list end address "
10381 "(%s)", list_head->ll_symbol);
10382 }
10383 }
10384 /* The assembler does not support .uleb128 directive. Emit
10385 DW_LLE_start_end with a pair of absolute addresses. */
10386 else
10387 {
10388 dwarf2out_maybe_output_loclist_view_pair (curr);
10389 dw2_asm_output_data (1, DW_LLE_start_end,
10390 "DW_LLE_start_end (%s)",
10391 list_head->ll_symbol);
10392 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10393 "Location list begin address (%s)",
10394 list_head->ll_symbol);
10395 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10396 "Location list end address (%s)",
10397 list_head->ll_symbol);
10398 }
10399 }
10400 else if (dwarf_split_debug_info)
10401 {
10402 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr
10403 and 4 byte length. */
10404 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10405 "Location list start/length entry (%s)",
10406 list_head->ll_symbol);
10407 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10408 "Location list range start index (%s)",
10409 curr->begin);
10410 /* The length field is 4 bytes. If we ever need to support
10411 an 8-byte length, we can add a new DW_LLE code or fall back
10412 to DW_LLE_GNU_start_end_entry. */
10413 dw2_asm_output_delta (4, curr->end, curr->begin,
10414 "Location list range length (%s)",
10415 list_head->ll_symbol);
10416 }
10417 else if (!have_multiple_function_sections)
10418 {
10419 /* Pair of relative addresses against start of text section. */
10420 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10421 "Location list begin address (%s)",
10422 list_head->ll_symbol);
10423 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10424 "Location list end address (%s)",
10425 list_head->ll_symbol);
10426 }
10427 else
10428 {
10429 /* Pair of absolute addresses. */
10430 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10431 "Location list begin address (%s)",
10432 list_head->ll_symbol);
10433 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10434 "Location list end address (%s)",
10435 list_head->ll_symbol);
10436 }
10437
10438 /* Output the block length for this list of location operations. */
10439 if (dwarf_version >= 5)
10440 dw2_asm_output_data_uleb128 (size, "Location expression size");
10441 else
10442 {
10443 gcc_assert (size <= 0xffff);
10444 dw2_asm_output_data (2, size, "Location expression size");
10445 }
10446
10447 output_loc_sequence (curr->expr, -1);
10448 }
10449
10450 /* And finally list termination. */
10451 if (dwarf_version >= 5)
10452 dw2_asm_output_data (1, DW_LLE_end_of_list,
10453 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10454 else if (dwarf_split_debug_info)
10455 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10456 "Location list terminator (%s)",
10457 list_head->ll_symbol);
10458 else
10459 {
10460 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10461 "Location list terminator begin (%s)",
10462 list_head->ll_symbol);
10463 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10464 "Location list terminator end (%s)",
10465 list_head->ll_symbol);
10466 }
10467
10468 gcc_assert (!list_head->vl_symbol
10469 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10470 }
10471
10472 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10473 section. Emit a relocated reference if val_entry is NULL, otherwise,
10474 emit an indirect reference. */
10475
10476 static void
10477 output_range_list_offset (dw_attr_node *a)
10478 {
10479 const char *name = dwarf_attr_name (a->dw_attr);
10480
10481 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10482 {
10483 if (dwarf_version >= 5)
10484 {
10485 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10486 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10487 debug_ranges_section, "%s", name);
10488 }
10489 else
10490 {
10491 char *p = strchr (ranges_section_label, '\0');
10492 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10493 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10494 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10495 debug_ranges_section, "%s", name);
10496 *p = '\0';
10497 }
10498 }
10499 else if (dwarf_version >= 5)
10500 {
10501 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10502 gcc_assert (rnglist_idx);
10503 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10504 }
10505 else
10506 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10507 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10508 "%s (offset from %s)", name, ranges_section_label);
10509 }
10510
10511 /* Output the offset into the debug_loc section. */
10512
10513 static void
10514 output_loc_list_offset (dw_attr_node *a)
10515 {
10516 char *sym = AT_loc_list (a)->ll_symbol;
10517
10518 gcc_assert (sym);
10519 if (!dwarf_split_debug_info)
10520 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10521 "%s", dwarf_attr_name (a->dw_attr));
10522 else if (dwarf_version >= 5)
10523 {
10524 gcc_assert (AT_loc_list (a)->num_assigned);
10525 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10526 dwarf_attr_name (a->dw_attr),
10527 sym);
10528 }
10529 else
10530 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10531 "%s", dwarf_attr_name (a->dw_attr));
10532 }
10533
10534 /* Output the view list offset into the debug_loc section. */
10535
10536 static void
10537 output_view_list_offset (dw_attr_node *a)
10538 {
10539 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10540
10541 gcc_assert (sym);
10542 if (dwarf_split_debug_info)
10543 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10544 "%s", dwarf_attr_name (a->dw_attr));
10545 else
10546 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10547 "%s", dwarf_attr_name (a->dw_attr));
10548 }
10549
10550 /* Output an attribute's index or value appropriately. */
10551
10552 static void
10553 output_attr_index_or_value (dw_attr_node *a)
10554 {
10555 const char *name = dwarf_attr_name (a->dw_attr);
10556
10557 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10558 {
10559 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10560 return;
10561 }
10562 switch (AT_class (a))
10563 {
10564 case dw_val_class_addr:
10565 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10566 break;
10567 case dw_val_class_high_pc:
10568 case dw_val_class_lbl_id:
10569 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10570 break;
10571 default:
10572 gcc_unreachable ();
10573 }
10574 }
10575
10576 /* Output a type signature. */
10577
10578 static inline void
10579 output_signature (const char *sig, const char *name)
10580 {
10581 int i;
10582
10583 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10584 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10585 }
10586
10587 /* Output a discriminant value. */
10588
10589 static inline void
10590 output_discr_value (dw_discr_value *discr_value, const char *name)
10591 {
10592 if (discr_value->pos)
10593 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10594 else
10595 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10596 }
10597
10598 /* Output the DIE and its attributes. Called recursively to generate
10599 the definitions of each child DIE. */
10600
10601 static void
10602 output_die (dw_die_ref die)
10603 {
10604 dw_attr_node *a;
10605 dw_die_ref c;
10606 unsigned long size;
10607 unsigned ix;
10608
10609 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10610 (unsigned long)die->die_offset,
10611 dwarf_tag_name (die->die_tag));
10612
10613 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10614 {
10615 const char *name = dwarf_attr_name (a->dw_attr);
10616
10617 switch (AT_class (a))
10618 {
10619 case dw_val_class_addr:
10620 output_attr_index_or_value (a);
10621 break;
10622
10623 case dw_val_class_offset:
10624 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10625 "%s", name);
10626 break;
10627
10628 case dw_val_class_range_list:
10629 output_range_list_offset (a);
10630 break;
10631
10632 case dw_val_class_loc:
10633 size = size_of_locs (AT_loc (a));
10634
10635 /* Output the block length for this list of location operations. */
10636 if (dwarf_version >= 4)
10637 dw2_asm_output_data_uleb128 (size, "%s", name);
10638 else
10639 dw2_asm_output_data (constant_size (size), size, "%s", name);
10640
10641 output_loc_sequence (AT_loc (a), -1);
10642 break;
10643
10644 case dw_val_class_const:
10645 /* ??? It would be slightly more efficient to use a scheme like is
10646 used for unsigned constants below, but gdb 4.x does not sign
10647 extend. Gdb 5.x does sign extend. */
10648 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10649 break;
10650
10651 case dw_val_class_unsigned_const:
10652 {
10653 int csize = constant_size (AT_unsigned (a));
10654 if (dwarf_version == 3
10655 && a->dw_attr == DW_AT_data_member_location
10656 && csize >= 4)
10657 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10658 else
10659 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10660 }
10661 break;
10662
10663 case dw_val_class_symview:
10664 {
10665 int vsize;
10666 if (symview_upper_bound <= 0xff)
10667 vsize = 1;
10668 else if (symview_upper_bound <= 0xffff)
10669 vsize = 2;
10670 else if (symview_upper_bound <= 0xffffffff)
10671 vsize = 4;
10672 else
10673 vsize = 8;
10674 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10675 "%s", name);
10676 }
10677 break;
10678
10679 case dw_val_class_const_implicit:
10680 if (flag_debug_asm)
10681 fprintf (asm_out_file, "\t\t\t%s %s ("
10682 HOST_WIDE_INT_PRINT_DEC ")\n",
10683 ASM_COMMENT_START, name, AT_int (a));
10684 break;
10685
10686 case dw_val_class_unsigned_const_implicit:
10687 if (flag_debug_asm)
10688 fprintf (asm_out_file, "\t\t\t%s %s ("
10689 HOST_WIDE_INT_PRINT_HEX ")\n",
10690 ASM_COMMENT_START, name, AT_unsigned (a));
10691 break;
10692
10693 case dw_val_class_const_double:
10694 {
10695 unsigned HOST_WIDE_INT first, second;
10696
10697 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10698 dw2_asm_output_data (1,
10699 HOST_BITS_PER_DOUBLE_INT
10700 / HOST_BITS_PER_CHAR,
10701 NULL);
10702
10703 if (WORDS_BIG_ENDIAN)
10704 {
10705 first = a->dw_attr_val.v.val_double.high;
10706 second = a->dw_attr_val.v.val_double.low;
10707 }
10708 else
10709 {
10710 first = a->dw_attr_val.v.val_double.low;
10711 second = a->dw_attr_val.v.val_double.high;
10712 }
10713
10714 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10715 first, "%s", name);
10716 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10717 second, NULL);
10718 }
10719 break;
10720
10721 case dw_val_class_wide_int:
10722 {
10723 int i;
10724 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10725 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10726 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10727 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10728 * l, NULL);
10729
10730 if (WORDS_BIG_ENDIAN)
10731 for (i = len - 1; i >= 0; --i)
10732 {
10733 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10734 "%s", name);
10735 name = "";
10736 }
10737 else
10738 for (i = 0; i < len; ++i)
10739 {
10740 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10741 "%s", name);
10742 name = "";
10743 }
10744 }
10745 break;
10746
10747 case dw_val_class_vec:
10748 {
10749 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10750 unsigned int len = a->dw_attr_val.v.val_vec.length;
10751 unsigned int i;
10752 unsigned char *p;
10753
10754 dw2_asm_output_data (constant_size (len * elt_size),
10755 len * elt_size, "%s", name);
10756 if (elt_size > sizeof (HOST_WIDE_INT))
10757 {
10758 elt_size /= 2;
10759 len *= 2;
10760 }
10761 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10762 i < len;
10763 i++, p += elt_size)
10764 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10765 "fp or vector constant word %u", i);
10766 break;
10767 }
10768
10769 case dw_val_class_flag:
10770 if (dwarf_version >= 4)
10771 {
10772 /* Currently all add_AT_flag calls pass in 1 as last argument,
10773 so DW_FORM_flag_present can be used. If that ever changes,
10774 we'll need to use DW_FORM_flag and have some optimization
10775 in build_abbrev_table that will change those to
10776 DW_FORM_flag_present if it is set to 1 in all DIEs using
10777 the same abbrev entry. */
10778 gcc_assert (AT_flag (a) == 1);
10779 if (flag_debug_asm)
10780 fprintf (asm_out_file, "\t\t\t%s %s\n",
10781 ASM_COMMENT_START, name);
10782 break;
10783 }
10784 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10785 break;
10786
10787 case dw_val_class_loc_list:
10788 output_loc_list_offset (a);
10789 break;
10790
10791 case dw_val_class_view_list:
10792 output_view_list_offset (a);
10793 break;
10794
10795 case dw_val_class_die_ref:
10796 if (AT_ref_external (a))
10797 {
10798 if (AT_ref (a)->comdat_type_p)
10799 {
10800 comdat_type_node *type_node
10801 = AT_ref (a)->die_id.die_type_node;
10802
10803 gcc_assert (type_node);
10804 output_signature (type_node->signature, name);
10805 }
10806 else
10807 {
10808 const char *sym = AT_ref (a)->die_id.die_symbol;
10809 int size;
10810
10811 gcc_assert (sym);
10812 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10813 length, whereas in DWARF3 it's always sized as an
10814 offset. */
10815 if (dwarf_version == 2)
10816 size = DWARF2_ADDR_SIZE;
10817 else
10818 size = DWARF_OFFSET_SIZE;
10819 	    /* ??? We cannot unconditionally output die_offset if
10820 	       non-zero - others might create references to those
10821 	       DIEs via symbols.
10822 	       And we do not clear its DIE offset after outputting it
10823 	       (and the label refers to the actual DIE, not to the
10824 	       DWARF CU unit header, which is what label + offset would
10825 	       have to be relative to for that to be correct).
10826 	       ??? This is the reason for the with_offset flag.  */
10827 if (AT_ref (a)->with_offset)
10828 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10829 debug_info_section, "%s", name);
10830 else
10831 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10832 name);
10833 }
10834 }
10835 else
10836 {
10837 gcc_assert (AT_ref (a)->die_offset);
10838 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10839 "%s", name);
10840 }
10841 break;
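	/* For example, on a 64-bit target emitting 32-bit DWARF, the
	   external reference above is 8 bytes under DWARF 2
	   (DWARF2_ADDR_SIZE) but only 4 bytes under DWARF 3 and later
	   (DWARF_OFFSET_SIZE), since later versions define
	   DW_FORM_ref_addr as an offset into .debug_info.  */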
10842
10843 case dw_val_class_fde_ref:
10844 {
10845 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10846
10847 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10848 a->dw_attr_val.v.val_fde_index * 2);
10849 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10850 "%s", name);
10851 }
10852 break;
10853
10854 case dw_val_class_vms_delta:
10855 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10856 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10857 AT_vms_delta2 (a), AT_vms_delta1 (a),
10858 "%s", name);
10859 #else
10860 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10861 AT_vms_delta2 (a), AT_vms_delta1 (a),
10862 "%s", name);
10863 #endif
10864 break;
10865
10866 case dw_val_class_lbl_id:
10867 output_attr_index_or_value (a);
10868 break;
10869
10870 case dw_val_class_lineptr:
10871 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10872 debug_line_section, "%s", name);
10873 break;
10874
10875 case dw_val_class_macptr:
10876 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10877 debug_macinfo_section, "%s", name);
10878 break;
10879
10880 case dw_val_class_loclistsptr:
10881 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10882 debug_loc_section, "%s", name);
10883 break;
10884
10885 case dw_val_class_str:
10886 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10887 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10888 a->dw_attr_val.v.val_str->label,
10889 debug_str_section,
10890 "%s: \"%s\"", name, AT_string (a));
10891 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10892 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10893 a->dw_attr_val.v.val_str->label,
10894 debug_line_str_section,
10895 "%s: \"%s\"", name, AT_string (a));
10896 else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (DW_FORM_strx))
10897 dw2_asm_output_data_uleb128 (AT_index (a),
10898 "%s: \"%s\"", name, AT_string (a));
10899 else
10900 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10901 break;
10902
10903 case dw_val_class_file:
10904 {
10905 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10906
10907 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10908 a->dw_attr_val.v.val_file->filename);
10909 break;
10910 }
10911
10912 case dw_val_class_file_implicit:
10913 if (flag_debug_asm)
10914 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10915 ASM_COMMENT_START, name,
10916 maybe_emit_file (a->dw_attr_val.v.val_file),
10917 a->dw_attr_val.v.val_file->filename);
10918 break;
10919
10920 case dw_val_class_data8:
10921 {
10922 int i;
10923
10924 for (i = 0; i < 8; i++)
10925 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10926 i == 0 ? "%s" : NULL, name);
10927 break;
10928 }
10929
10930 case dw_val_class_high_pc:
10931 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10932 get_AT_low_pc (die), "DW_AT_high_pc");
10933 break;
10934
10935 case dw_val_class_discr_value:
10936 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10937 break;
10938
10939 case dw_val_class_discr_list:
10940 {
10941 dw_discr_list_ref list = AT_discr_list (a);
10942 const int size = size_of_discr_list (list);
10943
10944 /* This is a block, so output its length first. */
10945 dw2_asm_output_data (constant_size (size), size,
10946 "%s: block size", name);
10947
10948 for (; list != NULL; list = list->dw_discr_next)
10949 {
10950 /* One byte for the discriminant value descriptor, and then as
10951 many LEB128 numbers as required. */
10952 if (list->dw_discr_range)
10953 dw2_asm_output_data (1, DW_DSC_range,
10954 "%s: DW_DSC_range", name);
10955 else
10956 dw2_asm_output_data (1, DW_DSC_label,
10957 "%s: DW_DSC_label", name);
10958
10959 output_discr_value (&list->dw_discr_lower_bound, name);
10960 if (list->dw_discr_range)
10961 output_discr_value (&list->dw_discr_upper_bound, name);
10962 }
10963 break;
10964 }
10965
10966 default:
10967 gcc_unreachable ();
10968 }
10969 }
10970
10971 FOR_EACH_CHILD (die, c, output_die (c));
10972
10973 /* Add null byte to terminate sibling list. */
10974 if (die->die_child != NULL)
10975 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10976 (unsigned long) die->die_offset);
10977 }
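
/* Purely illustrative sketch, not part of GCC and never compiled: a
   minimal unsigned LEB128 encoder showing the byte layout that the
   dw2_asm_output_data_uleb128 calls in output_die ask the assembler to
   produce.  The buffer handling here is hypothetical.  */
#if 0
static unsigned int
sketch_encode_uleb128 (unsigned HOST_WIDE_INT value, unsigned char *buf)
{
  unsigned int len = 0;
  do
    {
      unsigned char byte = value & 0x7f;	/* Low seven bits.  */
      value >>= 7;
      if (value != 0)
	byte |= 0x80;				/* More bytes follow.  */
      buf[len++] = byte;
    }
  while (value != 0);
  return len;					/* e.g. 300 -> 0xac 0x02.  */
}
#endif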
10978
10979 /* Output the dwarf version number. */
10980
10981 static void
10982 output_dwarf_version ()
10983 {
10984 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10985 views in loclist. That will change eventually. */
10986 if (dwarf_version == 6)
10987 {
10988 static bool once;
10989 if (!once)
10990 {
10991 warning (0, "%<-gdwarf-6%> is output as version 5 with "
10992 "incompatibilities");
10993 once = true;
10994 }
10995 dw2_asm_output_data (2, 5, "DWARF version number");
10996 }
10997 else
10998 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10999 }
11000
11001 /* Output the compilation unit that appears at the beginning of the
11002 .debug_info section, and precedes the DIE descriptions. */
11003
11004 static void
11005 output_compilation_unit_header (enum dwarf_unit_type ut)
11006 {
11007 if (!XCOFF_DEBUGGING_INFO)
11008 {
11009 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11010 dw2_asm_output_data (4, 0xffffffff,
11011 "Initial length escape value indicating 64-bit DWARF extension");
11012 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11013 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
11014 "Length of Compilation Unit Info");
11015 }
11016
11017 output_dwarf_version ();
11018 if (dwarf_version >= 5)
11019 {
11020 const char *name;
11021 switch (ut)
11022 {
11023 case DW_UT_compile: name = "DW_UT_compile"; break;
11024 case DW_UT_type: name = "DW_UT_type"; break;
11025 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
11026 case DW_UT_split_type: name = "DW_UT_split_type"; break;
11027 default: gcc_unreachable ();
11028 }
11029 dw2_asm_output_data (1, ut, "%s", name);
11030 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11031 }
11032 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
11033 debug_abbrev_section,
11034 "Offset Into Abbrev. Section");
11035 if (dwarf_version < 5)
11036 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11037 }
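
/* For example, with 32-bit DWARF the header emitted above for a DWARF 5
   DW_UT_compile unit is 4 bytes of unit length, 2 bytes of version (5),
   1 byte of unit type (DW_UT_compile), 1 byte of address size and a
   4-byte .debug_abbrev offset; for DWARF 2-4 it is the 4-byte length,
   2-byte version, 4-byte abbrev offset and 1-byte address size.  With
   64-bit DWARF the length field grows to the 0xffffffff escape plus an
   8-byte length, and the offsets become 8 bytes.  */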
11038
11039 /* Output the compilation unit DIE and its children. */
11040
11041 static void
11042 output_comp_unit (dw_die_ref die, int output_if_empty,
11043 const unsigned char *dwo_id)
11044 {
11045 const char *secname, *oldsym;
11046 char *tmp;
11047
11048   /* Unless we are outputting the main CU, we may throw away empty ones.  */
11049 if (!output_if_empty && die->die_child == NULL)
11050 return;
11051
11052 /* Even if there are no children of this DIE, we must output the information
11053 about the compilation unit. Otherwise, on an empty translation unit, we
11054 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
11055 will then complain when examining the file. First mark all the DIEs in
11056 this CU so we know which get local refs. */
11057 mark_dies (die);
11058
11059 external_ref_hash_type *extern_map = optimize_external_refs (die);
11060
11061   /* For now, optimize only the main CU; to optimize the rest we'd need
11062      to see all of them earlier.  Leave the rest for post-linking tools
11063      like DWZ.  */
11064 if (die == comp_unit_die ())
11065 abbrev_opt_start = vec_safe_length (abbrev_die_table);
11066
11067 build_abbrev_table (die, extern_map);
11068
11069 optimize_abbrev_table ();
11070
11071 delete extern_map;
11072
11073 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11074 next_die_offset = (dwo_id
11075 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11076 : DWARF_COMPILE_UNIT_HEADER_SIZE);
11077 calc_die_sizes (die);
11078
11079 oldsym = die->die_id.die_symbol;
11080 if (oldsym && die->comdat_type_p)
11081 {
11082 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
11083
11084 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
11085 secname = tmp;
11086 die->die_id.die_symbol = NULL;
11087 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11088 }
11089 else
11090 {
11091 switch_to_section (debug_info_section);
11092 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11093 info_section_emitted = true;
11094 }
11095
11096 /* For LTO cross unit DIE refs we want a symbol on the start of the
11097 debuginfo section, not on the CU DIE. */
11098 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11099 {
11100 /* ??? No way to get visibility assembled without a decl. */
11101 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11102 get_identifier (oldsym), char_type_node);
11103 TREE_PUBLIC (decl) = true;
11104 TREE_STATIC (decl) = true;
11105 DECL_ARTIFICIAL (decl) = true;
11106 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11107 DECL_VISIBILITY_SPECIFIED (decl) = true;
11108 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11109 #ifdef ASM_WEAKEN_LABEL
11110 /* We prefer a .weak because that handles duplicates from duplicate
11111 archive members in a graceful way. */
11112 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11113 #else
11114 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11115 #endif
11116 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11117 }
11118
11119 /* Output debugging information. */
11120 output_compilation_unit_header (dwo_id
11121 ? DW_UT_split_compile : DW_UT_compile);
11122 if (dwarf_version >= 5)
11123 {
11124 if (dwo_id != NULL)
11125 for (int i = 0; i < 8; i++)
11126 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11127 }
11128 output_die (die);
11129
11130 /* Leave the marks on the main CU, so we can check them in
11131 output_pubnames. */
11132 if (oldsym)
11133 {
11134 unmark_dies (die);
11135 die->die_id.die_symbol = oldsym;
11136 }
11137 }
11138
11139 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11140 and .debug_pubtypes. This is configured per-target, but can be
11141 overridden by the -gpubnames or -gno-pubnames options. */
11142
11143 static inline bool
11144 want_pubnames (void)
11145 {
11146 if (debug_info_level <= DINFO_LEVEL_TERSE
11147 /* Names and types go to the early debug part only. */
11148 || in_lto_p)
11149 return false;
11150 if (debug_generate_pub_sections != -1)
11151 return debug_generate_pub_sections;
11152 return targetm.want_debug_pub_sections;
11153 }
11154
11155 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11156
11157 static void
11158 add_AT_pubnames (dw_die_ref die)
11159 {
11160 if (want_pubnames ())
11161 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11162 }
11163
11164 /* Add a string attribute value to a skeleton DIE. */
11165
11166 static inline void
11167 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11168 const char *str)
11169 {
11170 dw_attr_node attr;
11171 struct indirect_string_node *node;
11172
11173 if (! skeleton_debug_str_hash)
11174 skeleton_debug_str_hash
11175 = hash_table<indirect_string_hasher>::create_ggc (10);
11176
11177 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11178 find_string_form (node);
11179 if (node->form == dwarf_FORM (DW_FORM_strx))
11180 node->form = DW_FORM_strp;
11181
11182 attr.dw_attr = attr_kind;
11183 attr.dw_attr_val.val_class = dw_val_class_str;
11184 attr.dw_attr_val.val_entry = NULL;
11185 attr.dw_attr_val.v.val_str = node;
11186 add_dwarf_attr (die, &attr);
11187 }
11188
11189 /* Helper function to generate top-level dies for skeleton debug_info and
11190 debug_types. */
11191
11192 static void
11193 add_top_level_skeleton_die_attrs (dw_die_ref die)
11194 {
11195 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11196 const char *comp_dir = comp_dir_string ();
11197
11198 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11199 if (comp_dir != NULL)
11200 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11201 add_AT_pubnames (die);
11202 if (addr_index_table != NULL && addr_index_table->size () > 0)
11203 add_AT_lineptr (die, dwarf_AT (DW_AT_addr_base), debug_addr_section_label);
11204 }
11205
11206 /* Output skeleton debug sections that point to the dwo file. */
11207
11208 static void
11209 output_skeleton_debug_sections (dw_die_ref comp_unit,
11210 const unsigned char *dwo_id)
11211 {
11212 /* These attributes will be found in the full debug_info section. */
11213 remove_AT (comp_unit, DW_AT_producer);
11214 remove_AT (comp_unit, DW_AT_language);
11215
11216 switch_to_section (debug_skeleton_info_section);
11217 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11218
11219   /* Produce the skeleton compilation-unit header.  This one differs enough
11220      from a normal CU header that it's better not to call
11221      output_compilation_unit_header.  */
11222 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11223 dw2_asm_output_data (4, 0xffffffff,
11224 "Initial length escape value indicating 64-bit "
11225 "DWARF extension");
11226
11227 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11228 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11229 - DWARF_INITIAL_LENGTH_SIZE
11230 + size_of_die (comp_unit),
11231 "Length of Compilation Unit Info");
11232 output_dwarf_version ();
11233 if (dwarf_version >= 5)
11234 {
11235 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11236 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11237 }
11238 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11239 debug_skeleton_abbrev_section,
11240 "Offset Into Abbrev. Section");
11241 if (dwarf_version < 5)
11242 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11243 else
11244 for (int i = 0; i < 8; i++)
11245 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11246
11247 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11248 output_die (comp_unit);
11249
11250 /* Build the skeleton debug_abbrev section. */
11251 switch_to_section (debug_skeleton_abbrev_section);
11252 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11253
11254 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11255
11256 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11257 }
11258
11259 /* Output a comdat type unit DIE and its children. */
11260
11261 static void
11262 output_comdat_type_unit (comdat_type_node *node,
11263 bool early_lto_debug ATTRIBUTE_UNUSED)
11264 {
11265 const char *secname;
11266 char *tmp;
11267 int i;
11268 #if defined (OBJECT_FORMAT_ELF)
11269 tree comdat_key;
11270 #endif
11271
11272 /* First mark all the DIEs in this CU so we know which get local refs. */
11273 mark_dies (node->root_die);
11274
11275 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11276
11277 build_abbrev_table (node->root_die, extern_map);
11278
11279 delete extern_map;
11280 extern_map = NULL;
11281
11282 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11283 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11284 calc_die_sizes (node->root_die);
11285
11286 #if defined (OBJECT_FORMAT_ELF)
11287 if (dwarf_version >= 5)
11288 {
11289 if (!dwarf_split_debug_info)
11290 secname = early_lto_debug ? DEBUG_LTO_INFO_SECTION : DEBUG_INFO_SECTION;
11291 else
11292 secname = (early_lto_debug
11293 ? DEBUG_LTO_DWO_INFO_SECTION : DEBUG_DWO_INFO_SECTION);
11294 }
11295 else if (!dwarf_split_debug_info)
11296 secname = early_lto_debug ? ".gnu.debuglto_.debug_types" : ".debug_types";
11297 else
11298 secname = (early_lto_debug
11299 ? ".gnu.debuglto_.debug_types.dwo" : ".debug_types.dwo");
11300
11301 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11302 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11303 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11304 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11305 comdat_key = get_identifier (tmp);
11306 targetm.asm_out.named_section (secname,
11307 SECTION_DEBUG | SECTION_LINKONCE,
11308 comdat_key);
11309 #else
11310 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11311 sprintf (tmp, (dwarf_version >= 5
11312 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11313 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11314 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11315 secname = tmp;
11316 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11317 #endif
11318
11319 /* Output debugging information. */
11320 output_compilation_unit_header (dwarf_split_debug_info
11321 ? DW_UT_split_type : DW_UT_type);
11322 output_signature (node->signature, "Type Signature");
11323 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11324 "Offset to Type DIE");
11325 output_die (node->root_die);
11326
11327 unmark_dies (node->root_die);
11328 }
11329
11330 /* Return the DWARF2/3 pubname associated with a decl. */
11331
11332 static const char *
11333 dwarf2_name (tree decl, int scope)
11334 {
11335 if (DECL_NAMELESS (decl))
11336 return NULL;
11337 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11338 }
11339
11340 /* Add a new entry to .debug_pubnames if appropriate. */
11341
11342 static void
11343 add_pubname_string (const char *str, dw_die_ref die)
11344 {
11345 pubname_entry e;
11346
11347 e.die = die;
11348 e.name = xstrdup (str);
11349 vec_safe_push (pubname_table, e);
11350 }
11351
11352 static void
11353 add_pubname (tree decl, dw_die_ref die)
11354 {
11355 if (!want_pubnames ())
11356 return;
11357
11358 /* Don't add items to the table when we expect that the consumer will have
11359 just read the enclosing die. For example, if the consumer is looking at a
11360 class_member, it will either be inside the class already, or will have just
11361 looked up the class to find the member. Either way, searching the class is
11362 faster than searching the index. */
11363 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11364 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11365 {
11366 const char *name = dwarf2_name (decl, 1);
11367
11368 if (name)
11369 add_pubname_string (name, die);
11370 }
11371 }
11372
11373 /* Add an enumerator to the pubnames section. */
11374
11375 static void
11376 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11377 {
11378 pubname_entry e;
11379
11380 gcc_assert (scope_name);
11381 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11382 e.die = die;
11383 vec_safe_push (pubname_table, e);
11384 }
11385
11386 /* Add a new entry to .debug_pubtypes if appropriate. */
11387
11388 static void
11389 add_pubtype (tree decl, dw_die_ref die)
11390 {
11391 pubname_entry e;
11392
11393 if (!want_pubnames ())
11394 return;
11395
11396 if ((TREE_PUBLIC (decl)
11397 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11398 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11399 {
11400 tree scope = NULL;
11401 const char *scope_name = "";
11402 const char *sep = is_cxx () ? "::" : ".";
11403 const char *name;
11404
11405 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11406 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11407 {
11408 scope_name = lang_hooks.dwarf_name (scope, 1);
11409 if (scope_name != NULL && scope_name[0] != '\0')
11410 scope_name = concat (scope_name, sep, NULL);
11411 else
11412 scope_name = "";
11413 }
11414
11415 if (TYPE_P (decl))
11416 name = type_tag (decl);
11417 else
11418 name = lang_hooks.dwarf_name (decl, 1);
11419
11420 /* If we don't have a name for the type, there's no point in adding
11421 it to the table. */
11422 if (name != NULL && name[0] != '\0')
11423 {
11424 e.die = die;
11425 e.name = concat (scope_name, name, NULL);
11426 vec_safe_push (pubtype_table, e);
11427 }
11428
11429 /* Although it might be more consistent to add the pubinfo for the
11430 enumerators as their dies are created, they should only be added if the
11431 enum type meets the criteria above. So rather than re-check the parent
11432 enum type whenever an enumerator die is created, just output them all
11433 here. This isn't protected by the name conditional because anonymous
11434 enums don't have names. */
11435 if (die->die_tag == DW_TAG_enumeration_type)
11436 {
11437 dw_die_ref c;
11438
11439 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11440 }
11441 }
11442 }
11443
11444 /* Output a single entry in the pubnames table. */
11445
11446 static void
11447 output_pubname (dw_offset die_offset, pubname_entry *entry)
11448 {
11449 dw_die_ref die = entry->die;
11450 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11451
11452 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11453
11454 if (debug_generate_pub_sections == 2)
11455 {
11456 /* This logic follows gdb's method for determining the value of the flag
11457 byte. */
11458 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11459 switch (die->die_tag)
11460 {
11461 case DW_TAG_typedef:
11462 case DW_TAG_base_type:
11463 case DW_TAG_subrange_type:
11464 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11465 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11466 break;
11467 case DW_TAG_enumerator:
11468 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11469 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11470 if (!is_cxx ())
11471 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11472 break;
11473 case DW_TAG_subprogram:
11474 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11475 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11476 if (!is_ada ())
11477 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11478 break;
11479 case DW_TAG_constant:
11480 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11481 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11482 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11483 break;
11484 case DW_TAG_variable:
11485 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11486 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11487 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11488 break;
11489 case DW_TAG_namespace:
11490 case DW_TAG_imported_declaration:
11491 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11492 break;
11493 case DW_TAG_class_type:
11494 case DW_TAG_interface_type:
11495 case DW_TAG_structure_type:
11496 case DW_TAG_union_type:
11497 case DW_TAG_enumeration_type:
11498 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11499 if (!is_cxx ())
11500 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11501 break;
11502 default:
11503 /* An unusual tag. Leave the flag-byte empty. */
11504 break;
11505 }
11506 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11507 "GDB-index flags");
11508 }
11509
11510 dw2_asm_output_nstring (entry->name, -1, "external name");
11511 }
11512
11513
11514 /* Output the public names table used to speed up access to externally
11515 visible names; or the public types table used to find type definitions. */
11516
11517 static void
11518 output_pubnames (vec<pubname_entry, va_gc> *names)
11519 {
11520 unsigned i;
11521 unsigned long pubnames_length = size_of_pubnames (names);
11522 pubname_entry *pub;
11523
11524 if (!XCOFF_DEBUGGING_INFO)
11525 {
11526 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11527 dw2_asm_output_data (4, 0xffffffff,
11528 "Initial length escape value indicating 64-bit DWARF extension");
11529 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11530 "Pub Info Length");
11531 }
11532
11533 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11534 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11535
11536 if (dwarf_split_debug_info)
11537 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11538 debug_skeleton_info_section,
11539 "Offset of Compilation Unit Info");
11540 else
11541 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11542 debug_info_section,
11543 "Offset of Compilation Unit Info");
11544 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11545 "Compilation Unit Length");
11546
11547 FOR_EACH_VEC_ELT (*names, i, pub)
11548 {
11549 if (include_pubname_in_output (names, pub))
11550 {
11551 dw_offset die_offset = pub->die->die_offset;
11552
11553 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11554 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11555 gcc_assert (pub->die->die_mark);
11556
11557 /* If we're putting types in their own .debug_types sections,
11558 the .debug_pubtypes table will still point to the compile
11559 unit (not the type unit), so we want to use the offset of
11560 the skeleton DIE (if there is one). */
11561 if (pub->die->comdat_type_p && names == pubtype_table)
11562 {
11563 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11564
11565 if (type_node != NULL)
11566 die_offset = (type_node->skeleton_die != NULL
11567 ? type_node->skeleton_die->die_offset
11568 : comp_unit_die ()->die_offset);
11569 }
11570
11571 output_pubname (die_offset, pub);
11572 }
11573 }
11574
11575 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11576 }
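
/* For illustration, a .debug_pubnames set produced above looks like:
   a 4-byte unit length (or the 64-bit DWARF escape plus 8 bytes), a
   2-byte version (2), the offset and length of the CU in .debug_info,
   then one (DIE offset, NUL-terminated name) pair per entry (plus a
   one-byte GDB-index flags field when debug_generate_pub_sections is 2),
   terminated by a zero DIE offset.  */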
11577
11578 /* Output public names and types tables if necessary. */
11579
11580 static void
11581 output_pubtables (void)
11582 {
11583 if (!want_pubnames () || !info_section_emitted)
11584 return;
11585
11586 switch_to_section (debug_pubnames_section);
11587 output_pubnames (pubname_table);
11588 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11589 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11590 simply won't look for the section. */
11591 switch_to_section (debug_pubtypes_section);
11592 output_pubnames (pubtype_table);
11593 }
11594
11595
11596 /* Output the information that goes into the .debug_aranges table.
11597 Namely, define the beginning and ending address range of the
11598 text section generated for this compilation unit. */
11599
11600 static void
11601 output_aranges (void)
11602 {
11603 unsigned i;
11604 unsigned long aranges_length = size_of_aranges ();
11605
11606 if (!XCOFF_DEBUGGING_INFO)
11607 {
11608 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11609 dw2_asm_output_data (4, 0xffffffff,
11610 "Initial length escape value indicating 64-bit DWARF extension");
11611 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11612 "Length of Address Ranges Info");
11613 }
11614
11615 /* Version number for aranges is still 2, even up to DWARF5. */
11616 dw2_asm_output_data (2, 2, "DWARF aranges version");
11617 if (dwarf_split_debug_info)
11618 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11619 debug_skeleton_info_section,
11620 "Offset of Compilation Unit Info");
11621 else
11622 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11623 debug_info_section,
11624 "Offset of Compilation Unit Info");
11625 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11626 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11627
11628 /* We need to align to twice the pointer size here. */
11629 if (DWARF_ARANGES_PAD_SIZE)
11630 {
11631       /* Pad using 2-byte words so that the padding is correct for any
11632 	 pointer size.  */
11633 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11634 2 * DWARF2_ADDR_SIZE);
11635 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11636 dw2_asm_output_data (2, 0, NULL);
11637 }
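  /* For example, with 32-bit DWARF the header so far is 4 + 2 + 4 + 1 + 1
     = 12 bytes, so with 8-byte addresses (alignment of 2 * 8 = 16) the
     code above emits 4 bytes of padding before the address/length tuples;
     with 4-byte addresses (alignment 8) it likewise emits 4.  */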
11638
11639   /* Do not output these entries if the sections were not used; in that
11640      case the length will be 0 and the address may end up as 0 if the
11641      section is discarded by ld --gc-sections, leaving an invalid (0, 0)
11642      entry that can be confused with the terminator.  */
11644 if (text_section_used)
11645 {
11646 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11647 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11648 text_section_label, "Length");
11649 }
11650 if (cold_text_section_used)
11651 {
11652 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11653 "Address");
11654 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11655 cold_text_section_label, "Length");
11656 }
11657
11658 if (have_multiple_function_sections)
11659 {
11660 unsigned fde_idx;
11661 dw_fde_ref fde;
11662
11663 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11664 {
11665 if (DECL_IGNORED_P (fde->decl))
11666 continue;
11667 if (!fde->in_std_section)
11668 {
11669 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11670 "Address");
11671 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11672 fde->dw_fde_begin, "Length");
11673 }
11674 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11675 {
11676 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11677 "Address");
11678 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11679 fde->dw_fde_second_begin, "Length");
11680 }
11681 }
11682 }
11683
11684 /* Output the terminator words. */
11685 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11686 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11687 }
11688
11689 /* Add a new entry to .debug_ranges. Return its index into
11690 ranges_table vector. */
11691
11692 static unsigned int
11693 add_ranges_num (int num, bool maybe_new_sec)
11694 {
11695 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11696 vec_safe_push (ranges_table, r);
11697 return vec_safe_length (ranges_table) - 1;
11698 }
11699
11700 /* Add a new entry to .debug_ranges corresponding to a block, or a
11701 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11702 this entry might be in a different section from previous range. */
11703
11704 static unsigned int
11705 add_ranges (const_tree block, bool maybe_new_sec)
11706 {
11707 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11708 }
11709
11710 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11711    chain, or a middle entry of a chain that will be directly referred to.  */
11712
11713 static void
11714 note_rnglist_head (unsigned int offset)
11715 {
11716 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11717 return;
11718 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11719 }
11720
11721 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11722 When using dwarf_split_debug_info, address attributes in dies destined
11723 for the final executable should be direct references--setting the
11724 parameter force_direct ensures this behavior. */
11725
11726 static void
11727 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11728 bool *added, bool force_direct)
11729 {
11730 unsigned int in_use = vec_safe_length (ranges_by_label);
11731 unsigned int offset;
11732 dw_ranges_by_label rbl = { begin, end };
11733 vec_safe_push (ranges_by_label, rbl);
11734 offset = add_ranges_num (-(int)in_use - 1, true);
11735 if (!*added)
11736 {
11737 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11738 *added = true;
11739 note_rnglist_head (offset);
11740 }
11741 }
11742
11743 /* Emit .debug_ranges section. */
11744
11745 static void
11746 output_ranges (void)
11747 {
11748 unsigned i;
11749 static const char *const start_fmt = "Offset %#x";
11750 const char *fmt = start_fmt;
11751 dw_ranges *r;
11752
11753 switch_to_section (debug_ranges_section);
11754 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11755 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11756 {
11757 int block_num = r->num;
11758
11759 if (block_num > 0)
11760 {
11761 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11762 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11763
11764 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11765 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11766
11767 /* If all code is in the text section, then the compilation
11768 unit base address defaults to DW_AT_low_pc, which is the
11769 base of the text section. */
11770 if (!have_multiple_function_sections)
11771 {
11772 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11773 text_section_label,
11774 fmt, i * 2 * DWARF2_ADDR_SIZE);
11775 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11776 text_section_label, NULL);
11777 }
11778
11779 /* Otherwise, the compilation unit base address is zero,
11780 which allows us to use absolute addresses, and not worry
11781 about whether the target supports cross-section
11782 arithmetic. */
11783 else
11784 {
11785 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11786 fmt, i * 2 * DWARF2_ADDR_SIZE);
11787 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11788 }
11789
11790 fmt = NULL;
11791 }
11792
11793 /* Negative block_num stands for an index into ranges_by_label. */
11794 else if (block_num < 0)
11795 {
11796 int lab_idx = - block_num - 1;
11797
11798 if (!have_multiple_function_sections)
11799 {
11800 gcc_unreachable ();
11801 #if 0
11802 /* If we ever use add_ranges_by_labels () for a single
11803 function section, all we have to do is to take out
11804 the #if 0 above. */
11805 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11806 (*ranges_by_label)[lab_idx].begin,
11807 text_section_label,
11808 fmt, i * 2 * DWARF2_ADDR_SIZE);
11809 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11810 (*ranges_by_label)[lab_idx].end,
11811 text_section_label, NULL);
11812 #endif
11813 }
11814 else
11815 {
11816 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11817 (*ranges_by_label)[lab_idx].begin,
11818 fmt, i * 2 * DWARF2_ADDR_SIZE);
11819 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11820 (*ranges_by_label)[lab_idx].end,
11821 NULL);
11822 }
11823 }
11824 else
11825 {
11826 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11827 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11828 fmt = start_fmt;
11829 }
11830 }
11831 }
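
/* For illustration, each .debug_ranges entry emitted above is a pair of
   address-size values: (begin, end) as deltas from the text section
   label when all code is in one text section (so the CU base address,
   DW_AT_low_pc, applies), or absolute begin/end addresses otherwise,
   with a (0, 0) pair terminating each list.  DWARF also allows a base
   address selection entry whose first value is all ones, but GCC does
   not emit one here.  */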
11832
11833 /* Non-zero if .debug_line_str should be used for .debug_line section
11834 strings or strings that are likely shareable with those. */
11835 #define DWARF5_USE_DEBUG_LINE_STR \
11836 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11837 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11838 /* FIXME: there is no .debug_line_str.dwo section, \
11839 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11840 && !dwarf_split_debug_info)
11841
11842 /* Assign .debug_rnglists indexes. */
11843
11844 static void
11845 index_rnglists (void)
11846 {
11847 unsigned i;
11848 dw_ranges *r;
11849
11850 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11851 if (r->label)
11852 r->idx = rnglist_idx++;
11853 }
11854
11855 /* Emit .debug_rnglists section. */
11856
11857 static void
11858 output_rnglists (unsigned generation)
11859 {
11860 unsigned i;
11861 dw_ranges *r;
11862 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11863 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11864 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11865
11866 switch_to_section (debug_ranges_section);
11867 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11868 /* There are up to 4 unique ranges labels per generation.
11869 See also init_sections_and_labels. */
11870 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11871 2 + generation * 4);
11872 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11873 3 + generation * 4);
11874 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11875 dw2_asm_output_data (4, 0xffffffff,
11876 "Initial length escape value indicating "
11877 "64-bit DWARF extension");
11878 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11879 "Length of Range Lists");
11880 ASM_OUTPUT_LABEL (asm_out_file, l1);
11881 output_dwarf_version ();
11882 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11883 dw2_asm_output_data (1, 0, "Segment Size");
11884 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11885 about relocation sizes and primarily care about the size of .debug*
11886 sections in linked shared libraries and executables, then
11887 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11888 into it are usually larger than just DW_FORM_sec_offset offsets
11889 into the .debug_rnglists section. */
11890 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11891 "Offset Entry Count");
11892 if (dwarf_split_debug_info)
11893 {
11894 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11895 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11896 if (r->label)
11897 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11898 ranges_base_label, NULL);
11899 }
11900
11901 const char *lab = "";
11902 unsigned int len = vec_safe_length (ranges_table);
11903 const char *base = NULL;
11904 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11905 {
11906 int block_num = r->num;
11907
11908 if (r->label)
11909 {
11910 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11911 lab = r->label;
11912 }
11913 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11914 base = NULL;
11915 if (block_num > 0)
11916 {
11917 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11918 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11919
11920 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11921 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11922
11923 if (HAVE_AS_LEB128)
11924 {
11925 /* If all code is in the text section, then the compilation
11926 unit base address defaults to DW_AT_low_pc, which is the
11927 base of the text section. */
11928 if (!have_multiple_function_sections)
11929 {
11930 dw2_asm_output_data (1, DW_RLE_offset_pair,
11931 "DW_RLE_offset_pair (%s)", lab);
11932 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11933 "Range begin address (%s)", lab);
11934 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11935 "Range end address (%s)", lab);
11936 continue;
11937 }
11938 if (base == NULL)
11939 {
11940 dw_ranges *r2 = NULL;
11941 if (i < len - 1)
11942 r2 = &(*ranges_table)[i + 1];
11943 if (r2
11944 && r2->num != 0
11945 && r2->label == NULL
11946 && !r2->maybe_new_sec)
11947 {
11948 dw2_asm_output_data (1, DW_RLE_base_address,
11949 "DW_RLE_base_address (%s)", lab);
11950 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11951 "Base address (%s)", lab);
11952 strcpy (basebuf, blabel);
11953 base = basebuf;
11954 }
11955 }
11956 if (base)
11957 {
11958 dw2_asm_output_data (1, DW_RLE_offset_pair,
11959 "DW_RLE_offset_pair (%s)", lab);
11960 dw2_asm_output_delta_uleb128 (blabel, base,
11961 "Range begin address (%s)", lab);
11962 dw2_asm_output_delta_uleb128 (elabel, base,
11963 "Range end address (%s)", lab);
11964 continue;
11965 }
11966 dw2_asm_output_data (1, DW_RLE_start_length,
11967 "DW_RLE_start_length (%s)", lab);
11968 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11969 "Range begin address (%s)", lab);
11970 dw2_asm_output_delta_uleb128 (elabel, blabel,
11971 "Range length (%s)", lab);
11972 }
11973 else
11974 {
11975 dw2_asm_output_data (1, DW_RLE_start_end,
11976 "DW_RLE_start_end (%s)", lab);
11977 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11978 "Range begin address (%s)", lab);
11979 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11980 "Range end address (%s)", lab);
11981 }
11982 }
11983
11984 /* Negative block_num stands for an index into ranges_by_label. */
11985 else if (block_num < 0)
11986 {
11987 int lab_idx = - block_num - 1;
11988 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11989 const char *elabel = (*ranges_by_label)[lab_idx].end;
11990
11991 if (!have_multiple_function_sections)
11992 gcc_unreachable ();
11993 if (HAVE_AS_LEB128)
11994 {
11995 dw2_asm_output_data (1, DW_RLE_start_length,
11996 "DW_RLE_start_length (%s)", lab);
11997 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11998 "Range begin address (%s)", lab);
11999 dw2_asm_output_delta_uleb128 (elabel, blabel,
12000 "Range length (%s)", lab);
12001 }
12002 else
12003 {
12004 dw2_asm_output_data (1, DW_RLE_start_end,
12005 "DW_RLE_start_end (%s)", lab);
12006 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
12007 "Range begin address (%s)", lab);
12008 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
12009 "Range end address (%s)", lab);
12010 }
12011 }
12012 else
12013 dw2_asm_output_data (1, DW_RLE_end_of_list,
12014 "DW_RLE_end_of_list (%s)", lab);
12015 }
12016 ASM_OUTPUT_LABEL (asm_out_file, l2);
12017 }
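
/* For illustration, a typical range list emitted above for code spread
   over multiple sections uses the DWARF 5 encodings:
   DW_RLE_base_address (0x05) followed by an address-size base address,
   one DW_RLE_offset_pair (0x04) with two ULEB128 offsets per block that
   shares that base, DW_RLE_start_length (0x07) or DW_RLE_start_end
   (0x06) entries where no base can be reused, and a terminating
   DW_RLE_end_of_list (0x00).  */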
12018
12019 /* Data structure containing information about input files. */
12020 struct file_info
12021 {
12022 const char *path; /* Complete file name. */
12023 const char *fname; /* File name part. */
12024 int length; /* Length of entire string. */
12025 struct dwarf_file_data * file_idx; /* Index in input file table. */
12026 int dir_idx; /* Index in directory table. */
12027 };
12028
12029 /* Data structure containing information about directories with source
12030 files. */
12031 struct dir_info
12032 {
12033 const char *path; /* Path including directory name. */
12034 int length; /* Path length. */
12035 int prefix; /* Index of directory entry which is a prefix. */
12036 int count; /* Number of files in this directory. */
12037 int dir_idx; /* Index of directory used as base. */
12038 };
12039
12040 /* Callback function for file_info comparison. We sort by looking at
12041 the directories in the path. */
12042
12043 static int
12044 file_info_cmp (const void *p1, const void *p2)
12045 {
12046 const struct file_info *const s1 = (const struct file_info *) p1;
12047 const struct file_info *const s2 = (const struct file_info *) p2;
12048 const unsigned char *cp1;
12049 const unsigned char *cp2;
12050
12051 /* Take care of file names without directories. We need to make sure that
12052 we return consistent values to qsort since some will get confused if
12053 we return the same value when identical operands are passed in opposite
12054 orders. So if neither has a directory, return 0 and otherwise return
12055 1 or -1 depending on which one has the directory. We want the one with
12056      the directory to sort after the one without, so all files without a
12057      directory are at the start (normally only the compilation unit file).  */
12058 if ((s1->path == s1->fname || s2->path == s2->fname))
12059 return (s2->path == s2->fname) - (s1->path == s1->fname);
12060
12061 cp1 = (const unsigned char *) s1->path;
12062 cp2 = (const unsigned char *) s2->path;
12063
12064 while (1)
12065 {
12066 ++cp1;
12067 ++cp2;
12068 /* Reached the end of the first path? If so, handle like above,
12069 but now we want longer directory prefixes before shorter ones. */
12070 if ((cp1 == (const unsigned char *) s1->fname)
12071 || (cp2 == (const unsigned char *) s2->fname))
12072 return ((cp1 == (const unsigned char *) s1->fname)
12073 - (cp2 == (const unsigned char *) s2->fname));
12074
12075 /* Character of current path component the same? */
12076 else if (*cp1 != *cp2)
12077 return *cp1 - *cp2;
12078 }
12079 }
12080
12081 struct file_name_acquire_data
12082 {
12083 struct file_info *files;
12084 int used_files;
12085 int max_files;
12086 };
12087
12088 /* Traversal function for the hash table. */
12089
12090 int
12091 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
12092 {
12093 struct dwarf_file_data *d = *slot;
12094 struct file_info *fi;
12095 const char *f;
12096
12097 gcc_assert (fnad->max_files >= d->emitted_number);
12098
12099 if (! d->emitted_number)
12100 return 1;
12101
12102 gcc_assert (fnad->max_files != fnad->used_files);
12103
12104 fi = fnad->files + fnad->used_files++;
12105
12106 f = remap_debug_filename (d->filename);
12107
12108 /* Skip all leading "./". */
12109 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12110 f += 2;
12111
12112 /* Create a new array entry. */
12113 fi->path = f;
12114 fi->length = strlen (f);
12115 fi->file_idx = d;
12116
12117 /* Search for the file name part. */
12118 f = strrchr (f, DIR_SEPARATOR);
12119 #if defined (DIR_SEPARATOR_2)
12120 {
12121 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12122
12123 if (g != NULL)
12124 {
12125 if (f == NULL || f < g)
12126 f = g;
12127 }
12128 }
12129 #endif
12130
12131 fi->fname = f == NULL ? fi->path : f + 1;
12132 return 1;
12133 }
12134
12135 /* Helper function for output_file_names.  Emit a FORM-encoded
12136    string STR, using ENTRY_KIND and index IDX in the assembly
12137    comment.  */
12138
12139 static void
12140 output_line_string (enum dwarf_form form, const char *str,
12141 const char *entry_kind, unsigned int idx)
12142 {
12143 switch (form)
12144 {
12145 case DW_FORM_string:
12146 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12147 break;
12148 case DW_FORM_line_strp:
12149 if (!debug_line_str_hash)
12150 debug_line_str_hash
12151 = hash_table<indirect_string_hasher>::create_ggc (10);
12152
12153 struct indirect_string_node *node;
12154 node = find_AT_string_in_table (str, debug_line_str_hash);
12155 set_indirect_string (node);
12156 node->form = form;
12157 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12158 debug_line_str_section, "%s: %#x: \"%s\"",
12159 entry_kind, 0, node->str);
12160 break;
12161 default:
12162 gcc_unreachable ();
12163 }
12164 }
12165
12166 /* Output the directory table and the file name table. We try to minimize
12167 the total amount of memory needed. A heuristic is used to avoid large
12168 slowdowns with many input files. */
12169
12170 static void
12171 output_file_names (void)
12172 {
12173 struct file_name_acquire_data fnad;
12174 int numfiles;
12175 struct file_info *files;
12176 struct dir_info *dirs;
12177 int *saved;
12178 int *savehere;
12179 int *backmap;
12180 int ndirs;
12181 int idx_offset;
12182 int i;
12183
12184 if (!last_emitted_file)
12185 {
12186 if (dwarf_version >= 5)
12187 {
12188 dw2_asm_output_data (1, 0, "Directory entry format count");
12189 dw2_asm_output_data_uleb128 (0, "Directories count");
12190 dw2_asm_output_data (1, 0, "File name entry format count");
12191 dw2_asm_output_data_uleb128 (0, "File names count");
12192 }
12193 else
12194 {
12195 dw2_asm_output_data (1, 0, "End directory table");
12196 dw2_asm_output_data (1, 0, "End file name table");
12197 }
12198 return;
12199 }
12200
12201 numfiles = last_emitted_file->emitted_number;
12202
12203 /* Allocate the various arrays we need. */
12204 files = XALLOCAVEC (struct file_info, numfiles);
12205 dirs = XALLOCAVEC (struct dir_info, numfiles);
12206
12207 fnad.files = files;
12208 fnad.used_files = 0;
12209 fnad.max_files = numfiles;
12210 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12211 gcc_assert (fnad.used_files == fnad.max_files);
12212
12213 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12214
12215 /* Find all the different directories used. */
12216 dirs[0].path = files[0].path;
12217 dirs[0].length = files[0].fname - files[0].path;
12218 dirs[0].prefix = -1;
12219 dirs[0].count = 1;
12220 dirs[0].dir_idx = 0;
12221 files[0].dir_idx = 0;
12222 ndirs = 1;
12223
12224 for (i = 1; i < numfiles; i++)
12225 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12226 && memcmp (dirs[ndirs - 1].path, files[i].path,
12227 dirs[ndirs - 1].length) == 0)
12228 {
12229 /* Same directory as last entry. */
12230 files[i].dir_idx = ndirs - 1;
12231 ++dirs[ndirs - 1].count;
12232 }
12233 else
12234 {
12235 int j;
12236
12237 /* This is a new directory. */
12238 dirs[ndirs].path = files[i].path;
12239 dirs[ndirs].length = files[i].fname - files[i].path;
12240 dirs[ndirs].count = 1;
12241 dirs[ndirs].dir_idx = ndirs;
12242 files[i].dir_idx = ndirs;
12243
12244 /* Search for a prefix. */
12245 dirs[ndirs].prefix = -1;
12246 for (j = 0; j < ndirs; j++)
12247 if (dirs[j].length < dirs[ndirs].length
12248 && dirs[j].length > 1
12249 && (dirs[ndirs].prefix == -1
12250 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12251 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12252 dirs[ndirs].prefix = j;
12253
12254 ++ndirs;
12255 }
12256
12257   /* Now to the actual work.  We have to find a subset of the directories
12258      which allows expressing each file name using references to the
12259      directory table with the least number of characters.  We do not do an
12260      exhaustive search where we would have to check every combination of
12261      every possible prefix.  Instead we use a heuristic which provides
12262      nearly optimal results in most cases and is never far off.  */
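  /* Illustrative example (made-up numbers): a 13-character directory
     prefix shared by three files yields savehere = 13 for each of them,
     so total = 39; since that exceeds the directory's length + 1, the
     loop below records it as the base for those files and their names
     are later emitted with the first 13 characters stripped.  */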
12263 saved = XALLOCAVEC (int, ndirs);
12264 savehere = XALLOCAVEC (int, ndirs);
12265
12266 memset (saved, '\0', ndirs * sizeof (saved[0]));
12267 for (i = 0; i < ndirs; i++)
12268 {
12269 int j;
12270 int total;
12271
12272 /* We can always save some space for the current directory. But this
12273 does not mean it will be enough to justify adding the directory. */
12274 savehere[i] = dirs[i].length;
12275 total = (savehere[i] - saved[i]) * dirs[i].count;
12276
12277 for (j = i + 1; j < ndirs; j++)
12278 {
12279 savehere[j] = 0;
12280 if (saved[j] < dirs[i].length)
12281 {
12282 /* Determine whether the dirs[i] path is a prefix of the
12283 dirs[j] path. */
12284 int k;
12285
12286 k = dirs[j].prefix;
12287 while (k != -1 && k != (int) i)
12288 k = dirs[k].prefix;
12289
12290 if (k == (int) i)
12291 {
12292 /* Yes it is. We can possibly save some memory by
12293 writing the filenames in dirs[j] relative to
12294 dirs[i]. */
12295 savehere[j] = dirs[i].length;
12296 total += (savehere[j] - saved[j]) * dirs[j].count;
12297 }
12298 }
12299 }
12300
12301 /* Check whether we can save enough to justify adding the dirs[i]
12302 directory. */
12303 if (total > dirs[i].length + 1)
12304 {
12305 /* It's worthwhile adding. */
12306 for (j = i; j < ndirs; j++)
12307 if (savehere[j] > 0)
12308 {
12309 /* Remember how much we saved for this directory so far. */
12310 saved[j] = savehere[j];
12311
12312 /* Remember the prefix directory. */
12313 dirs[j].dir_idx = i;
12314 }
12315 }
12316 }
12317
12318 /* Emit the directory name table. */
12319 idx_offset = dirs[0].length > 0 ? 1 : 0;
12320 enum dwarf_form str_form = DW_FORM_string;
12321 enum dwarf_form idx_form = DW_FORM_udata;
12322 if (dwarf_version >= 5)
12323 {
12324 const char *comp_dir = comp_dir_string ();
12325 if (comp_dir == NULL)
12326 comp_dir = "";
12327 dw2_asm_output_data (1, 1, "Directory entry format count");
12328 if (DWARF5_USE_DEBUG_LINE_STR)
12329 str_form = DW_FORM_line_strp;
12330 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12331 dw2_asm_output_data_uleb128 (str_form, "%s",
12332 get_DW_FORM_name (str_form));
12333 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12334 if (str_form == DW_FORM_string)
12335 {
12336 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12337 for (i = 1 - idx_offset; i < ndirs; i++)
12338 dw2_asm_output_nstring (dirs[i].path,
12339 dirs[i].length
12340 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12341 "Directory Entry: %#x", i + idx_offset);
12342 }
12343 else
12344 {
12345 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12346 for (i = 1 - idx_offset; i < ndirs; i++)
12347 {
12348 const char *str
12349 = ggc_alloc_string (dirs[i].path,
12350 dirs[i].length
12351 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12352 output_line_string (str_form, str, "Directory Entry",
12353 (unsigned) i + idx_offset);
12354 }
12355 }
12356 }
12357 else
12358 {
12359 for (i = 1 - idx_offset; i < ndirs; i++)
12360 dw2_asm_output_nstring (dirs[i].path,
12361 dirs[i].length
12362 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12363 "Directory Entry: %#x", i + idx_offset);
12364
12365 dw2_asm_output_data (1, 0, "End directory table");
12366 }
12367
12368 /* We have to emit them in the order of emitted_number since that's
12369 used in the debug info generation. To do this efficiently we
12370 generate a back-mapping of the indices first. */
12371 backmap = XALLOCAVEC (int, numfiles);
12372 for (i = 0; i < numfiles; i++)
12373 backmap[files[i].file_idx->emitted_number - 1] = i;
12374
12375 if (dwarf_version >= 5)
12376 {
12377 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12378 if (filename0 == NULL)
12379 filename0 = "";
12380       /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12381 	 DW_FORM_data2.  Choose one based on the number of directories
12382 	 and how much space the indexes would occupy in each encoding.
12383 	 If we have at most 256 directories, all indexes fit into
12384 	 a single byte, so DW_FORM_data1 is most compact (with at most
12385 	 128 directories, DW_FORM_udata would be just as compact, but
12386 	 no shorter and slower to decode).  */
12387 if (ndirs + idx_offset <= 256)
12388 idx_form = DW_FORM_data1;
12389       /* If there are more than 65536 directories, we have to use
12390 	 DW_FORM_udata, as DW_FORM_data2 can't refer to them.
12391 	 Otherwise, compute how much space the indexes would occupy if
12392 	 they all used DW_FORM_udata (sum), compare that to the size of
12393 	 the DW_FORM_data2 encoding, and pick the more efficient one.  */
12394 else if (ndirs + idx_offset <= 65536)
12395 {
12396 unsigned HOST_WIDE_INT sum = 1;
12397 for (i = 0; i < numfiles; i++)
12398 {
12399 int file_idx = backmap[i];
12400 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12401 sum += size_of_uleb128 (dir_idx);
12402 }
12403 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12404 idx_form = DW_FORM_data2;
12405 }
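      /* Worked example: with 300 directories every index fits in
	 DW_FORM_data2 at 2 bytes, while DW_FORM_udata costs 1 byte for
	 indexes 0-127 and 2 bytes for 128-16383; so if most files refer
	 to the first 128 directories the ULEB128 sum stays below
	 2 * (numfiles + 1) and DW_FORM_udata is kept, otherwise
	 DW_FORM_data2 is chosen above.  */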
12406 #ifdef VMS_DEBUGGING_INFO
12407 dw2_asm_output_data (1, 4, "File name entry format count");
12408 #else
12409 dw2_asm_output_data (1, 2, "File name entry format count");
12410 #endif
12411 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12412 dw2_asm_output_data_uleb128 (str_form, "%s",
12413 get_DW_FORM_name (str_form));
12414 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12415 "DW_LNCT_directory_index");
12416 dw2_asm_output_data_uleb128 (idx_form, "%s",
12417 get_DW_FORM_name (idx_form));
12418 #ifdef VMS_DEBUGGING_INFO
12419 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12420 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12421 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12422 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12423 #endif
12424 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12425
12426 output_line_string (str_form, filename0, "File Entry", 0);
12427
12428 /* Include directory index. */
12429 if (idx_form != DW_FORM_udata)
12430 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12431 0, NULL);
12432 else
12433 dw2_asm_output_data_uleb128 (0, NULL);
12434
12435 #ifdef VMS_DEBUGGING_INFO
12436 dw2_asm_output_data_uleb128 (0, NULL);
12437 dw2_asm_output_data_uleb128 (0, NULL);
12438 #endif
12439 }
12440
12441 /* Now write all the file names. */
12442 for (i = 0; i < numfiles; i++)
12443 {
12444 int file_idx = backmap[i];
12445 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12446
12447 #ifdef VMS_DEBUGGING_INFO
12448 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12449
12450 /* Setting these fields can lead to debugger miscomparisons,
12451 but VMS Debug requires them to be set correctly. */
12452
12453 int ver;
12454 long long cdt;
12455 long siz;
12456 int maxfilelen = (strlen (files[file_idx].path)
12457 + dirs[dir_idx].length
12458 + MAX_VMS_VERSION_LEN + 1);
12459 char *filebuf = XALLOCAVEC (char, maxfilelen);
12460
12461 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12462 snprintf (filebuf, maxfilelen, "%s;%d",
12463 files[file_idx].path + dirs[dir_idx].length, ver);
12464
12465 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12466
12467 /* Include directory index. */
12468 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12469 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12470 dir_idx + idx_offset, NULL);
12471 else
12472 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12473
12474 /* Modification time. */
12475 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12476 &cdt, 0, 0, 0) == 0)
12477 ? cdt : 0, NULL);
12478
12479 /* File length in bytes. */
12480 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12481 0, &siz, 0, 0) == 0)
12482 ? siz : 0, NULL);
12483 #else
12484 output_line_string (str_form,
12485 files[file_idx].path + dirs[dir_idx].length,
12486 "File Entry", (unsigned) i + 1);
12487
12488 /* Include directory index. */
12489 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12490 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12491 dir_idx + idx_offset, NULL);
12492 else
12493 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12494
12495 if (dwarf_version >= 5)
12496 continue;
12497
12498 /* Modification time. */
12499 dw2_asm_output_data_uleb128 (0, NULL);
12500
12501 /* File length in bytes. */
12502 dw2_asm_output_data_uleb128 (0, NULL);
12503 #endif /* VMS_DEBUGGING_INFO */
12504 }
12505
12506 if (dwarf_version < 5)
12507 dw2_asm_output_data (1, 0, "End file name table");
12508 }
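/* As a rough illustration (assuming the non-VMS path with
   str_form == DW_FORM_string and idx_form == DW_FORM_data1), the DWARF 5
   file name table emitted above amounts to assembly along these lines:

	.byte	0x2		# File name entry format count
	.uleb128 0x1		# DW_LNCT_path
	.uleb128 0x8		# DW_FORM_string
	.uleb128 0x2		# DW_LNCT_directory_index
	.uleb128 0xb		# DW_FORM_data1
	.uleb128 numfiles+1	# File names count
	.string	"..."		# DW_AT_name of the compilation unit
	.byte	0		# directory index of entry 0
				# then one path/index pair per file  */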
12509
12510
12511 /* Output one line number table into the .debug_line section. */
12512
12513 static void
12514 output_one_line_info_table (dw_line_info_table *table)
12515 {
12516 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12517 unsigned int current_line = 1;
12518 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12519 dw_line_info_entry *ent, *prev_addr;
12520 size_t i;
12521 unsigned int view;
12522
12523 view = 0;
12524
12525 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12526 {
12527 switch (ent->opcode)
12528 {
12529 case LI_set_address:
12530 /* ??? Unfortunately, we have little choice here currently, and
12531 must always use the most general form. GCC does not know the
12532 address delta itself, so we can't use DW_LNS_advance_pc. Many
12533 ports do have length attributes which will give an upper bound
12534 on the address range. We could perhaps use length attributes
12535 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12536 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12537
12538 view = 0;
12539
12540 /* This can handle any delta. This takes
12541 4+DWARF2_ADDR_SIZE bytes. */
12542 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12543 debug_variable_location_views
12544 ? ", reset view to 0" : "");
12545 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12546 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12547 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12548
12549 prev_addr = ent;
12550 break;
12551
12552 case LI_adv_address:
12553 {
12554 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12555 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12556 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12557
12558 view++;
12559
12560 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12561 dw2_asm_output_delta (2, line_label, prev_label,
12562 "from %s to %s", prev_label, line_label);
12563
12564 prev_addr = ent;
12565 break;
12566 }
12567
12568 case LI_set_line:
12569 if (ent->val == current_line)
12570 {
12571 /* We still need to start a new row, so output a copy insn. */
12572 dw2_asm_output_data (1, DW_LNS_copy,
12573 "copy line %u", current_line);
12574 }
12575 else
12576 {
12577 int line_offset = ent->val - current_line;
12578 int line_delta = line_offset - DWARF_LINE_BASE;
12579
12580 current_line = ent->val;
12581 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12582 {
12583 /* This can handle deltas from -10 to 234, using the current
12584 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12585 This takes 1 byte. */
12586 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12587 "line %u", current_line);
12588 }
12589 else
12590 {
12591 /* This can handle any delta. This takes at least 4 bytes,
12592 depending on the value being encoded. */
12593 dw2_asm_output_data (1, DW_LNS_advance_line,
12594 "advance to line %u", current_line);
12595 dw2_asm_output_data_sleb128 (line_offset, NULL);
12596 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12597 }
12598 }
12599 break;
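/* A worked example of the special-opcode path above, using the default
   parameters implied by the comments here (DWARF_LINE_BASE == -10,
   DWARF_LINE_RANGE == 245, DWARF_LINE_OPCODE_BASE == 13): advancing the
   line by +1 gives line_delta == 11 and emits opcode 13 + 11 == 24, which
   a consumer decodes as a line increment of -10 + (24 - 13) % 245 == 1
   and an address advance of (24 - 13) / 245 == 0 before appending a row.  */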
12600
12601 case LI_set_file:
12602 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12603 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12604 break;
12605
12606 case LI_set_column:
12607 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12608 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12609 break;
12610
12611 case LI_negate_stmt:
12612 current_is_stmt = !current_is_stmt;
12613 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12614 "is_stmt %d", current_is_stmt);
12615 break;
12616
12617 case LI_set_prologue_end:
12618 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12619 "set prologue end");
12620 break;
12621
12622 case LI_set_epilogue_begin:
12623 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12624 "set epilogue begin");
12625 break;
12626
12627 case LI_set_discriminator:
12628 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12629 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12630 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12631 dw2_asm_output_data_uleb128 (ent->val, NULL);
12632 break;
12633 }
12634 }
12635
12636 /* Emit debug info for the address of the end of the table. */
12637 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12638 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12639 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12640 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12641
12642 dw2_asm_output_data (1, 0, "end sequence");
12643 dw2_asm_output_data_uleb128 (1, NULL);
12644 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12645 }
12646
12647 /* Output the source line number correspondence information. This
12648 information goes into the .debug_line section. */
12649
12650 static void
12651 output_line_info (bool prologue_only)
12652 {
12653 static unsigned int generation;
12654 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12655 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12656 bool saw_one = false;
12657 int opc;
12658
12659 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12660 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12661 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12662 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12663
12664 if (!XCOFF_DEBUGGING_INFO)
12665 {
12666 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12667 dw2_asm_output_data (4, 0xffffffff,
12668 "Initial length escape value indicating 64-bit DWARF extension");
12669 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12670 "Length of Source Line Info");
12671 }
12672
12673 ASM_OUTPUT_LABEL (asm_out_file, l1);
12674
12675 output_dwarf_version ();
12676 if (dwarf_version >= 5)
12677 {
12678 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12679 dw2_asm_output_data (1, 0, "Segment Size");
12680 }
12681 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12682 ASM_OUTPUT_LABEL (asm_out_file, p1);
12683
12684 /* Define the architecture-dependent minimum instruction length (in bytes).
12685 In this implementation of DWARF, this field is used for information
12686 purposes only. Since GCC generates assembly language, we have no
12687 a priori knowledge of how many instruction bytes are generated for each
12688 source line, and therefore can use only the DW_LNE_set_address and
12689 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12690 this as '1', which is "correct enough" for all architectures,
12691 and don't let the target override. */
12692 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12693
12694 if (dwarf_version >= 4)
12695 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12696 "Maximum Operations Per Instruction");
12697 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12698 "Default is_stmt_start flag");
12699 dw2_asm_output_data (1, DWARF_LINE_BASE,
12700 "Line Base Value (Special Opcodes)");
12701 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12702 "Line Range Value (Special Opcodes)");
12703 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12704 "Special Opcode Base");
12705
12706 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12707 {
12708 int n_op_args;
12709 switch (opc)
12710 {
12711 case DW_LNS_advance_pc:
12712 case DW_LNS_advance_line:
12713 case DW_LNS_set_file:
12714 case DW_LNS_set_column:
12715 case DW_LNS_fixed_advance_pc:
12716 case DW_LNS_set_isa:
12717 n_op_args = 1;
12718 break;
12719 default:
12720 n_op_args = 0;
12721 break;
12722 }
12723
12724 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12725 opc, n_op_args);
12726 }
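/* The bytes emitted by this loop form the standard_opcode_lengths array of
   the line number program header: one operand count per standard opcode,
   e.g. 0 for DW_LNS_copy or DW_LNS_negate_stmt and 1 for
   DW_LNS_advance_line, so consumers can skip opcodes they don't implement.  */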
12727
12728 /* Write out the information about the files we use. */
12729 output_file_names ();
12730 ASM_OUTPUT_LABEL (asm_out_file, p2);
12731 if (prologue_only)
12732 {
12733 /* Output the marker for the end of the line number info. */
12734 ASM_OUTPUT_LABEL (asm_out_file, l2);
12735 return;
12736 }
12737
12738 if (separate_line_info)
12739 {
12740 dw_line_info_table *table;
12741 size_t i;
12742
12743 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12744 if (table->in_use)
12745 {
12746 output_one_line_info_table (table);
12747 saw_one = true;
12748 }
12749 }
12750 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12751 {
12752 output_one_line_info_table (cold_text_section_line_info);
12753 saw_one = true;
12754 }
12755
12756 /* ??? Some Darwin linkers crash on a .debug_line section with no
12757 sequences. Further, merely a DW_LNE_end_sequence entry is not
12758 sufficient -- the address column must also be initialized.
12759 Make sure to output at least one set_address/end_sequence pair,
12760 choosing .text since that section is always present. */
12761 if (text_section_line_info->in_use || !saw_one)
12762 output_one_line_info_table (text_section_line_info);
12763
12764 /* Output the marker for the end of the line number info. */
12765 ASM_OUTPUT_LABEL (asm_out_file, l2);
12766 }
12767 \f
12768 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12769
12770 static inline bool
12771 need_endianity_attribute_p (bool reverse)
12772 {
12773 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12774 }
12775
12776 /* Given a pointer to a tree node for some base type, return a pointer to
12777 a DIE that describes the given type. REVERSE is true if the type is
12778 to be interpreted in the reverse storage order wrt the target order.
12779
12780 This routine must only be called for GCC type nodes that correspond to
12781 Dwarf base (fundamental) types. */
12782
12783 static dw_die_ref
12784 base_type_die (tree type, bool reverse)
12785 {
12786 dw_die_ref base_type_result;
12787 enum dwarf_type encoding;
12788 bool fpt_used = false;
12789 struct fixed_point_type_info fpt_info;
12790 tree type_bias = NULL_TREE;
12791
12792 /* If this is a subtype that should not be emitted as a subrange type,
12793 use the base type. See subrange_type_for_debug_p. */
12794 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12795 type = TREE_TYPE (type);
12796
12797 switch (TREE_CODE (type))
12798 {
12799 case INTEGER_TYPE:
12800 if ((dwarf_version >= 4 || !dwarf_strict)
12801 && TYPE_NAME (type)
12802 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12803 && DECL_IS_BUILTIN (TYPE_NAME (type))
12804 && DECL_NAME (TYPE_NAME (type)))
12805 {
12806 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12807 if (strcmp (name, "char16_t") == 0
12808 || strcmp (name, "char32_t") == 0)
12809 {
12810 encoding = DW_ATE_UTF;
12811 break;
12812 }
12813 }
12814 if ((dwarf_version >= 3 || !dwarf_strict)
12815 && lang_hooks.types.get_fixed_point_type_info)
12816 {
12817 memset (&fpt_info, 0, sizeof (fpt_info));
12818 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12819 {
12820 fpt_used = true;
12821 encoding = ((TYPE_UNSIGNED (type))
12822 ? DW_ATE_unsigned_fixed
12823 : DW_ATE_signed_fixed);
12824 break;
12825 }
12826 }
12827 if (TYPE_STRING_FLAG (type))
12828 {
12829 if (TYPE_UNSIGNED (type))
12830 encoding = DW_ATE_unsigned_char;
12831 else
12832 encoding = DW_ATE_signed_char;
12833 }
12834 else if (TYPE_UNSIGNED (type))
12835 encoding = DW_ATE_unsigned;
12836 else
12837 encoding = DW_ATE_signed;
12838
12839 if (!dwarf_strict
12840 && lang_hooks.types.get_type_bias)
12841 type_bias = lang_hooks.types.get_type_bias (type);
12842 break;
12843
12844 case REAL_TYPE:
12845 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12846 {
12847 if (dwarf_version >= 3 || !dwarf_strict)
12848 encoding = DW_ATE_decimal_float;
12849 else
12850 encoding = DW_ATE_lo_user;
12851 }
12852 else
12853 encoding = DW_ATE_float;
12854 break;
12855
12856 case FIXED_POINT_TYPE:
12857 if (!(dwarf_version >= 3 || !dwarf_strict))
12858 encoding = DW_ATE_lo_user;
12859 else if (TYPE_UNSIGNED (type))
12860 encoding = DW_ATE_unsigned_fixed;
12861 else
12862 encoding = DW_ATE_signed_fixed;
12863 break;
12864
12865 /* Dwarf2 doesn't know anything about complex ints, so use
12866 a user-defined type for them. */
12867 case COMPLEX_TYPE:
12868 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12869 encoding = DW_ATE_complex_float;
12870 else
12871 encoding = DW_ATE_lo_user;
12872 break;
12873
12874 case BOOLEAN_TYPE:
12875 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12876 encoding = DW_ATE_boolean;
12877 break;
12878
12879 default:
12880 /* No other TREE_CODEs are Dwarf fundamental types. */
12881 gcc_unreachable ();
12882 }
12883
12884 base_type_result = new_die_raw (DW_TAG_base_type);
12885
12886 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12887 int_size_in_bytes (type));
12888 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12889
12890 if (need_endianity_attribute_p (reverse))
12891 add_AT_unsigned (base_type_result, DW_AT_endianity,
12892 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12893
12894 add_alignment_attribute (base_type_result, type);
12895
12896 if (fpt_used)
12897 {
12898 switch (fpt_info.scale_factor_kind)
12899 {
12900 case fixed_point_scale_factor_binary:
12901 add_AT_int (base_type_result, DW_AT_binary_scale,
12902 fpt_info.scale_factor.binary);
12903 break;
12904
12905 case fixed_point_scale_factor_decimal:
12906 add_AT_int (base_type_result, DW_AT_decimal_scale,
12907 fpt_info.scale_factor.decimal);
12908 break;
12909
12910 case fixed_point_scale_factor_arbitrary:
12911 /* Arbitrary scale factors cannot be described in standard DWARF,
12912 yet. */
12913 if (!dwarf_strict)
12914 {
12915 /* Describe the scale factor as a rational constant. */
12916 const dw_die_ref scale_factor
12917 = new_die (DW_TAG_constant, comp_unit_die (), type);
12918
12919 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12920 fpt_info.scale_factor.arbitrary.numerator);
12921 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12922 fpt_info.scale_factor.arbitrary.denominator);
12923
12924 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12925 }
12926 break;
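/* For instance, an Ada fixed-point type whose 'Small is specified as
   1.0 / 3.0 would get a DW_TAG_constant child carrying
   DW_AT_GNU_numerator 1 and DW_AT_GNU_denominator 3, referenced from the
   base type through DW_AT_small as done just above.  */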
12927
12928 default:
12929 gcc_unreachable ();
12930 }
12931 }
12932
12933 if (type_bias)
12934 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12935 dw_scalar_form_constant
12936 | dw_scalar_form_exprloc
12937 | dw_scalar_form_reference,
12938 NULL);
12939
12940 return base_type_result;
12941 }
12942
12943 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12944 named 'auto' in its type: return true for it, false otherwise. */
12945
12946 static inline bool
12947 is_cxx_auto (tree type)
12948 {
12949 if (is_cxx ())
12950 {
12951 tree name = TYPE_IDENTIFIER (type);
12952 if (name == get_identifier ("auto")
12953 || name == get_identifier ("decltype(auto)"))
12954 return true;
12955 }
12956 return false;
12957 }
12958
12959 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12960 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12961
12962 static inline int
12963 is_base_type (tree type)
12964 {
12965 switch (TREE_CODE (type))
12966 {
12967 case INTEGER_TYPE:
12968 case REAL_TYPE:
12969 case FIXED_POINT_TYPE:
12970 case COMPLEX_TYPE:
12971 case BOOLEAN_TYPE:
12972 return 1;
12973
12974 case VOID_TYPE:
12975 case ARRAY_TYPE:
12976 case RECORD_TYPE:
12977 case UNION_TYPE:
12978 case QUAL_UNION_TYPE:
12979 case ENUMERAL_TYPE:
12980 case FUNCTION_TYPE:
12981 case METHOD_TYPE:
12982 case POINTER_TYPE:
12983 case REFERENCE_TYPE:
12984 case NULLPTR_TYPE:
12985 case OFFSET_TYPE:
12986 case LANG_TYPE:
12987 case VECTOR_TYPE:
12988 return 0;
12989
12990 default:
12991 if (is_cxx_auto (type))
12992 return 0;
12993 gcc_unreachable ();
12994 }
12995
12996 return 0;
12997 }
12998
12999 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
13000 node, return the size in bits for the type if it is a constant, or else
13001 return the alignment for the type if the type's size is not constant, or
13002 else return BITS_PER_WORD if the type actually turns out to be an
13003 ERROR_MARK node. */
13004
13005 static inline unsigned HOST_WIDE_INT
13006 simple_type_size_in_bits (const_tree type)
13007 {
13008 if (TREE_CODE (type) == ERROR_MARK)
13009 return BITS_PER_WORD;
13010 else if (TYPE_SIZE (type) == NULL_TREE)
13011 return 0;
13012 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
13013 return tree_to_uhwi (TYPE_SIZE (type));
13014 else
13015 return TYPE_ALIGN (type);
13016 }
13017
13018 /* Similarly, but return an offset_int instead of UHWI. */
13019
13020 static inline offset_int
13021 offset_int_type_size_in_bits (const_tree type)
13022 {
13023 if (TREE_CODE (type) == ERROR_MARK)
13024 return BITS_PER_WORD;
13025 else if (TYPE_SIZE (type) == NULL_TREE)
13026 return 0;
13027 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
13028 return wi::to_offset (TYPE_SIZE (type));
13029 else
13030 return TYPE_ALIGN (type);
13031 }
13032
13033 /* Given a pointer to a tree node for a subrange type, return a pointer
13034 to a DIE that describes the given type. */
13035
13036 static dw_die_ref
13037 subrange_type_die (tree type, tree low, tree high, tree bias,
13038 dw_die_ref context_die)
13039 {
13040 dw_die_ref subrange_die;
13041 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
13042
13043 if (context_die == NULL)
13044 context_die = comp_unit_die ();
13045
13046 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
13047
13048 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
13049 {
13050 /* The size of the subrange type and its base type do not match,
13051 so we need to generate a size attribute for the subrange type. */
13052 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
13053 }
13054
13055 add_alignment_attribute (subrange_die, type);
13056
13057 if (low)
13058 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
13059 if (high)
13060 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
13061 if (bias && !dwarf_strict)
13062 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
13063 dw_scalar_form_constant
13064 | dw_scalar_form_exprloc
13065 | dw_scalar_form_reference,
13066 NULL);
13067
13068 return subrange_die;
13069 }
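/* A typical example: an Ada declaration along the lines of
   "subtype Small_Int is Integer range 1 .. 10" is expected to come out as
   a DW_TAG_subrange_type with DW_AT_lower_bound 1 and DW_AT_upper_bound 10,
   and without DW_AT_byte_size since the subtype is the same size as its
   base type.  */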
13070
13071 /* Returns the (const and/or volatile) cv_qualifiers associated with
13072 the decl node. This will normally be augmented with the
13073 cv_qualifiers of the underlying type in add_type_attribute. */
13074
13075 static int
13076 decl_quals (const_tree decl)
13077 {
13078 return ((TREE_READONLY (decl)
13079 /* The C++ front-end correctly marks reference-typed
13080 variables as readonly, but from a language (and debug
13081 info) standpoint they are not const-qualified. */
13082 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
13083 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
13084 | (TREE_THIS_VOLATILE (decl)
13085 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
13086 }
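/* For example, a "const int" variable yields TYPE_QUAL_CONST here, whereas
   a C++ "int &" variable, although marked TREE_READONLY by the front end,
   is deliberately reported as TYPE_UNQUALIFIED, as explained above.  */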
13087
13088 /* Determine the TYPE whose qualifiers match the largest strict subset
13089 of the given TYPE_QUALS, and return its qualifiers. Ignore all
13090 qualifiers outside QUAL_MASK. */
13091
13092 static int
13093 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
13094 {
13095 tree t;
13096 int best_rank = 0, best_qual = 0, max_rank;
13097
13098 type_quals &= qual_mask;
13099 max_rank = popcount_hwi (type_quals) - 1;
13100
13101 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13102 t = TYPE_NEXT_VARIANT (t))
13103 {
13104 int q = TYPE_QUALS (t) & qual_mask;
13105
13106 if ((q & type_quals) == q && q != type_quals
13107 && check_base_type (t, type))
13108 {
13109 int rank = popcount_hwi (q);
13110
13111 if (rank > best_rank)
13112 {
13113 best_rank = rank;
13114 best_qual = q;
13115 }
13116 }
13117 }
13118
13119 return best_qual;
13120 }
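/* For instance, when asked about a const volatile qualified type whose
   variant list only contains a plain const variant, this returns
   TYPE_QUAL_CONST, so modified_type_die merely has to wrap that existing
   DIE with the remaining DW_TAG_volatile_type modifier.  */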
13121
13122 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13123 static const dwarf_qual_info_t dwarf_qual_info[] =
13124 {
13125 { TYPE_QUAL_CONST, DW_TAG_const_type },
13126 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13127 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13128 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13129 };
13130 static const unsigned int dwarf_qual_info_size
13131 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13132
13133 /* If DIE is a qualified DIE of some base DIE with the same parent,
13134 return the base DIE, otherwise return NULL. Set MASK to the
13135 qualifiers added compared to the returned DIE. */
13136
13137 static dw_die_ref
13138 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13139 {
13140 unsigned int i;
13141 for (i = 0; i < dwarf_qual_info_size; i++)
13142 if (die->die_tag == dwarf_qual_info[i].t)
13143 break;
13144 if (i == dwarf_qual_info_size)
13145 return NULL;
13146 if (vec_safe_length (die->die_attr) != 1)
13147 return NULL;
13148 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13149 if (type == NULL || type->die_parent != die->die_parent)
13150 return NULL;
13151 *mask |= dwarf_qual_info[i].q;
13152 if (depth)
13153 {
13154 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13155 if (ret)
13156 return ret;
13157 }
13158 return type;
13159 }
13160
13161 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13162 entry that chains the modifiers specified by CV_QUALS in front of the
13163 given type. REVERSE is true if the type is to be interpreted in the
13164 reverse storage order wrt the target order. */
13165
13166 static dw_die_ref
13167 modified_type_die (tree type, int cv_quals, bool reverse,
13168 dw_die_ref context_die)
13169 {
13170 enum tree_code code = TREE_CODE (type);
13171 dw_die_ref mod_type_die;
13172 dw_die_ref sub_die = NULL;
13173 tree item_type = NULL;
13174 tree qualified_type;
13175 tree name, low, high;
13176 dw_die_ref mod_scope;
13177 /* Only these cv-qualifiers are currently handled. */
13178 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13179 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13180 ENCODE_QUAL_ADDR_SPACE(~0U));
13181 const bool reverse_base_type
13182 = need_endianity_attribute_p (reverse) && is_base_type (type);
13183
13184 if (code == ERROR_MARK)
13185 return NULL;
13186
13187 if (lang_hooks.types.get_debug_type)
13188 {
13189 tree debug_type = lang_hooks.types.get_debug_type (type);
13190
13191 if (debug_type != NULL_TREE && debug_type != type)
13192 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13193 }
13194
13195 cv_quals &= cv_qual_mask;
13196
13197 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13198 modifier tag (and not an attribute) that old consumers won't be
13199 able to handle. */
13200 if (dwarf_version < 3)
13201 cv_quals &= ~TYPE_QUAL_RESTRICT;
13202
13203 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
13204 if (dwarf_version < 5)
13205 cv_quals &= ~TYPE_QUAL_ATOMIC;
13206
13207 /* See if we already have the appropriately qualified variant of
13208 this type. */
13209 qualified_type = get_qualified_type (type, cv_quals);
13210
13211 if (qualified_type == sizetype)
13212 {
13213 /* Try not to expose the internal sizetype type's name. */
13214 if (TYPE_NAME (qualified_type)
13215 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13216 {
13217 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13218
13219 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13220 && (TYPE_PRECISION (t)
13221 == TYPE_PRECISION (qualified_type))
13222 && (TYPE_UNSIGNED (t)
13223 == TYPE_UNSIGNED (qualified_type)));
13224 qualified_type = t;
13225 }
13226 else if (qualified_type == sizetype
13227 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13228 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13229 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13230 qualified_type = size_type_node;
13231 if (type == sizetype)
13232 type = qualified_type;
13233 }
13234
13235 /* If we do, then we can just use its DIE, if it exists. */
13236 if (qualified_type)
13237 {
13238 mod_type_die = lookup_type_die (qualified_type);
13239
13240 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13241 dealt with specially: the DIE with the attribute, if it exists, is
13242 placed immediately after the regular DIE for the same base type. */
13243 if (mod_type_die
13244 && (!reverse_base_type
13245 || ((mod_type_die = mod_type_die->die_sib) != NULL
13246 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13247 return mod_type_die;
13248 }
13249
13250 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13251
13252 /* Handle C typedef types. */
13253 if (name
13254 && TREE_CODE (name) == TYPE_DECL
13255 && DECL_ORIGINAL_TYPE (name)
13256 && !DECL_ARTIFICIAL (name))
13257 {
13258 tree dtype = TREE_TYPE (name);
13259
13260 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13261 if (qualified_type == dtype && !reverse_base_type)
13262 {
13263 tree origin = decl_ultimate_origin (name);
13264
13265 /* Typedef variants that have an abstract origin don't get their own
13266 type DIE (see gen_typedef_die), so fall back on the ultimate
13267 abstract origin instead. */
13268 if (origin != NULL && origin != name)
13269 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13270 context_die);
13271
13272 /* For a named type, use the typedef. */
13273 gen_type_die (qualified_type, context_die);
13274 return lookup_type_die (qualified_type);
13275 }
13276 else
13277 {
13278 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13279 dquals &= cv_qual_mask;
13280 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13281 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13282 /* cv-unqualified version of named type. Just use
13283 the unnamed type to which it refers. */
13284 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13285 reverse, context_die);
13286 /* Else cv-qualified version of named type; fall through. */
13287 }
13288 }
13289
13290 mod_scope = scope_die_for (type, context_die);
13291
13292 if (cv_quals)
13293 {
13294 int sub_quals = 0, first_quals = 0;
13295 unsigned i;
13296 dw_die_ref first = NULL, last = NULL;
13297
13298 /* Determine a lesser qualified type that most closely matches
13299 this one. Then generate DW_TAG_* entries for the remaining
13300 qualifiers. */
13301 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13302 cv_qual_mask);
13303 if (sub_quals && use_debug_types)
13304 {
13305 bool needed = false;
13306 /* If emitting type units, make sure the order of qualifiers
13307 is canonical. Thus, start from unqualified type if
13308 an earlier qualifier is missing in sub_quals, but some later
13309 one is present there. */
13310 for (i = 0; i < dwarf_qual_info_size; i++)
13311 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13312 needed = true;
13313 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13314 {
13315 sub_quals = 0;
13316 break;
13317 }
13318 }
13319 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13320 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13321 {
13322 /* As not all intermediate qualified DIEs have corresponding
13323 tree types, ensure that qualified DIEs in the same scope
13324 as their DW_AT_type are emitted after their DW_AT_type,
13325 only with other qualified DIEs for the same type possibly
13326 in between them. Determine the range of such qualified
13327 DIEs now (first being the base type, last being the last
13328 qualified DIE corresponding to it). */
13329 unsigned int count = 0;
13330 first = qualified_die_p (mod_type_die, &first_quals,
13331 dwarf_qual_info_size);
13332 if (first == NULL)
13333 first = mod_type_die;
13334 gcc_assert ((first_quals & ~sub_quals) == 0);
13335 for (count = 0, last = first;
13336 count < (1U << dwarf_qual_info_size);
13337 count++, last = last->die_sib)
13338 {
13339 int quals = 0;
13340 if (last == mod_scope->die_child)
13341 break;
13342 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13343 != first)
13344 break;
13345 }
13346 }
13347
13348 for (i = 0; i < dwarf_qual_info_size; i++)
13349 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13350 {
13351 dw_die_ref d;
13352 if (first && first != last)
13353 {
13354 for (d = first->die_sib; ; d = d->die_sib)
13355 {
13356 int quals = 0;
13357 qualified_die_p (d, &quals, dwarf_qual_info_size);
13358 if (quals == (first_quals | dwarf_qual_info[i].q))
13359 break;
13360 if (d == last)
13361 {
13362 d = NULL;
13363 break;
13364 }
13365 }
13366 if (d)
13367 {
13368 mod_type_die = d;
13369 continue;
13370 }
13371 }
13372 if (first)
13373 {
13374 d = new_die_raw (dwarf_qual_info[i].t);
13375 add_child_die_after (mod_scope, d, last);
13376 last = d;
13377 }
13378 else
13379 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13380 if (mod_type_die)
13381 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13382 mod_type_die = d;
13383 first_quals |= dwarf_qual_info[i].q;
13384 }
13385 }
13386 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13387 {
13388 dwarf_tag tag = DW_TAG_pointer_type;
13389 if (code == REFERENCE_TYPE)
13390 {
13391 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13392 tag = DW_TAG_rvalue_reference_type;
13393 else
13394 tag = DW_TAG_reference_type;
13395 }
13396 mod_type_die = new_die (tag, mod_scope, type);
13397
13398 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13399 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13400 add_alignment_attribute (mod_type_die, type);
13401 item_type = TREE_TYPE (type);
13402
13403 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13404 if (!ADDR_SPACE_GENERIC_P (as))
13405 {
13406 int action = targetm.addr_space.debug (as);
13407 if (action >= 0)
13408 {
13409 /* Positive values indicate an address_class. */
13410 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13411 }
13412 else
13413 {
13414 /* Negative values indicate an (inverted) segment base reg. */
13415 dw_loc_descr_ref d
13416 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13417 add_AT_loc (mod_type_die, DW_AT_segment, d);
13418 }
13419 }
13420 }
13421 else if (code == INTEGER_TYPE
13422 && TREE_TYPE (type) != NULL_TREE
13423 && subrange_type_for_debug_p (type, &low, &high))
13424 {
13425 tree bias = NULL_TREE;
13426 if (lang_hooks.types.get_type_bias)
13427 bias = lang_hooks.types.get_type_bias (type);
13428 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13429 item_type = TREE_TYPE (type);
13430 }
13431 else if (is_base_type (type))
13432 {
13433 mod_type_die = base_type_die (type, reverse);
13434
13435 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13436 if (reverse_base_type)
13437 {
13438 dw_die_ref after_die
13439 = modified_type_die (type, cv_quals, false, context_die);
13440 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13441 }
13442 else
13443 add_child_die (comp_unit_die (), mod_type_die);
13444
13445 add_pubtype (type, mod_type_die);
13446 }
13447 else
13448 {
13449 gen_type_die (type, context_die);
13450
13451 /* We have to get the type_main_variant here (and pass that to the
13452 `lookup_type_die' routine) because the ..._TYPE node we have
13453 might simply be a *copy* of some original type node (where the
13454 copy was created to help us keep track of typedef names) and
13455 that copy might have a different TYPE_UID from the original
13456 ..._TYPE node. */
13457 if (TREE_CODE (type) == FUNCTION_TYPE
13458 || TREE_CODE (type) == METHOD_TYPE)
13459 {
13460 /* For function/method types, can't just use type_main_variant here,
13461 because that can have different ref-qualifiers for C++,
13462 but try to canonicalize. */
13463 tree main = TYPE_MAIN_VARIANT (type);
13464 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13465 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13466 && check_base_type (t, main)
13467 && check_lang_type (t, type))
13468 return lookup_type_die (t);
13469 return lookup_type_die (type);
13470 }
13471 else if (TREE_CODE (type) != VECTOR_TYPE
13472 && TREE_CODE (type) != ARRAY_TYPE)
13473 return lookup_type_die (type_main_variant (type));
13474 else
13475 /* Vectors have the debugging information in the type,
13476 not the main variant. */
13477 return lookup_type_die (type);
13478 }
13479
13480 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13481 don't output a DW_TAG_typedef, since there isn't one in the
13482 user's program; just attach a DW_AT_name to the type.
13483 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13484 if the base type already has the same name. */
13485 if (name
13486 && ((TREE_CODE (name) != TYPE_DECL
13487 && (qualified_type == TYPE_MAIN_VARIANT (type)
13488 || (cv_quals == TYPE_UNQUALIFIED)))
13489 || (TREE_CODE (name) == TYPE_DECL
13490 && TREE_TYPE (name) == qualified_type
13491 && DECL_NAME (name))))
13492 {
13493 if (TREE_CODE (name) == TYPE_DECL)
13494 /* Could just call add_name_and_src_coords_attributes here,
13495 but since this is a builtin type it doesn't have any
13496 useful source coordinates anyway. */
13497 name = DECL_NAME (name);
13498 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13499 }
13500 /* This probably indicates a bug. */
13501 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13502 {
13503 name = TYPE_IDENTIFIER (type);
13504 add_name_attribute (mod_type_die,
13505 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13506 }
13507
13508 if (qualified_type && !reverse_base_type)
13509 equate_type_number_to_die (qualified_type, mod_type_die);
13510
13511 if (item_type)
13512 /* We must do this after the equate_type_number_to_die call, in case
13513 this is a recursive type. This ensures that the modified_type_die
13514 recursion will terminate even if the type is recursive. Recursive
13515 types are possible in Ada. */
13516 sub_die = modified_type_die (item_type,
13517 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13518 reverse,
13519 context_die);
13520
13521 if (sub_die != NULL)
13522 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13523
13524 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13525 if (TYPE_ARTIFICIAL (type))
13526 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13527
13528 return mod_type_die;
13529 }
13530
13531 /* Generate DIEs for the generic parameters of T.
13532 T must be either a generic type or a generic function.
13533 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13534
13535 static void
13536 gen_generic_params_dies (tree t)
13537 {
13538 tree parms, args;
13539 int parms_num, i;
13540 dw_die_ref die = NULL;
13541 int non_default;
13542
13543 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13544 return;
13545
13546 if (TYPE_P (t))
13547 die = lookup_type_die (t);
13548 else if (DECL_P (t))
13549 die = lookup_decl_die (t);
13550
13551 gcc_assert (die);
13552
13553 parms = lang_hooks.get_innermost_generic_parms (t);
13554 if (!parms)
13555 /* T has no generic parameters. It means T is neither a generic type
13556 nor a generic function. End of story. */
13557 return;
13558
13559 parms_num = TREE_VEC_LENGTH (parms);
13560 args = lang_hooks.get_innermost_generic_args (t);
13561 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13562 non_default = int_cst_value (TREE_CHAIN (args));
13563 else
13564 non_default = TREE_VEC_LENGTH (args);
13565 for (i = 0; i < parms_num; i++)
13566 {
13567 tree parm, arg, arg_pack_elems;
13568 dw_die_ref parm_die;
13569
13570 parm = TREE_VEC_ELT (parms, i);
13571 arg = TREE_VEC_ELT (args, i);
13572 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13573 gcc_assert (parm && TREE_VALUE (parm) && arg);
13574
13575 if (parm && TREE_VALUE (parm) && arg)
13576 {
13577 /* If PARM represents a template parameter pack,
13578 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13579 by DW_TAG_template_*_parameter DIEs for the argument
13580 pack elements of ARG. Note that ARG would then be
13581 an argument pack. */
13582 if (arg_pack_elems)
13583 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13584 arg_pack_elems,
13585 die);
13586 else
13587 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13588 true /* emit name */, die);
13589 if (i >= non_default)
13590 add_AT_flag (parm_die, DW_AT_default_value, 1);
13591 }
13592 }
13593 }
13594
13595 /* Create and return a DIE for PARM which should be
13596 the representation of a generic type parameter.
13597 For instance, in the C++ front end, PARM would be a template parameter.
13598 ARG is the argument to PARM.
13599 If EMIT_NAME_P is true, the DIE will have a DW_AT_name attribute set to
13600 the name of PARM.
13601 PARENT_DIE is the parent DIE to which the newly created DIE should be
13602 added as a child node. */
13603
13604 static dw_die_ref
13605 generic_parameter_die (tree parm, tree arg,
13606 bool emit_name_p,
13607 dw_die_ref parent_die)
13608 {
13609 dw_die_ref tmpl_die = NULL;
13610 const char *name = NULL;
13611
13612 /* C++20 accepts class literals as template parameters, and var
13613 decls with initializers represent them. The VAR_DECLs would be
13614 rejected, but we can take the DECL_INITIAL constructor and
13615 attempt to expand it. */
13616 if (arg && VAR_P (arg))
13617 arg = DECL_INITIAL (arg);
13618
13619 if (!parm || !DECL_NAME (parm) || !arg)
13620 return NULL;
13621
13622 /* We support non-type generic parameters and arguments,
13623 type generic parameters and arguments, as well as
13624 generic generic parameters (a.k.a. template template parameters in C++)
13625 and arguments. */
13626 if (TREE_CODE (parm) == PARM_DECL)
13627 /* PARM is a nontype generic parameter */
13628 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13629 else if (TREE_CODE (parm) == TYPE_DECL)
13630 /* PARM is a type generic parameter. */
13631 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13632 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13633 /* PARM is a generic generic parameter.
13634 Its DIE is a GNU extension. It shall have a
13635 DW_AT_name attribute to represent the name of the template template
13636 parameter, and a DW_AT_GNU_template_name attribute to represent the
13637 name of the template template argument. */
13638 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13639 parent_die, parm);
13640 else
13641 gcc_unreachable ();
13642
13643 if (tmpl_die)
13644 {
13645 tree tmpl_type;
13646
13647 /* If PARM is a generic parameter pack, it means we are
13648 emitting debug info for a template argument pack element.
13649 In other terms, ARG is a template argument pack element.
13650 In that case, we don't emit any DW_AT_name attribute for
13651 the die. */
13652 if (emit_name_p)
13653 {
13654 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13655 gcc_assert (name);
13656 add_AT_string (tmpl_die, DW_AT_name, name);
13657 }
13658
13659 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13660 {
13661 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13662 TMPL_DIE should have a child DW_AT_type attribute that is set
13663 to the type of the argument to PARM, which is ARG.
13664 If PARM is a type generic parameter, TMPL_DIE should have a
13665 child DW_AT_type that is set to ARG. */
13666 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13667 add_type_attribute (tmpl_die, tmpl_type,
13668 (TREE_THIS_VOLATILE (tmpl_type)
13669 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13670 false, parent_die);
13671 }
13672 else
13673 {
13674 /* So TMPL_DIE is a DIE representing a generic generic
13675 parameter, a.k.a. a template template parameter in C++,
13676 and ARG is a template. */
13677
13678 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13679 to the name of the argument. */
13680 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13681 if (name)
13682 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13683 }
13684
13685 if (TREE_CODE (parm) == PARM_DECL)
13686 /* So PARM is a non-type generic parameter.
13687 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13688 attribute of TMPL_DIE whose value represents the value
13689 of ARG.
13690 We must be careful here:
13691 the value of ARG might reference some function decls.
13692 We might currently be emitting debug info for a generic
13693 type, and since types are emitted before function decls, we
13694 don't know whether the function decls referenced by ARG will
13695 actually be emitted after the cgraph computations.
13696 So we must defer the generation of the DW_AT_const_value
13697 until cgraph is ready. */
13698 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13699 }
13700
13701 return tmpl_die;
13702 }
13703
13704 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13705 PARM_PACK, which must be a template parameter pack. The returned DIE
13706 will be a child DIE of PARENT_DIE. */
13707
13708 static dw_die_ref
13709 template_parameter_pack_die (tree parm_pack,
13710 tree parm_pack_args,
13711 dw_die_ref parent_die)
13712 {
13713 dw_die_ref die;
13714 int j;
13715
13716 gcc_assert (parent_die && parm_pack);
13717
13718 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13719 add_name_and_src_coords_attributes (die, parm_pack);
13720 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13721 generic_parameter_die (parm_pack,
13722 TREE_VEC_ELT (parm_pack_args, j),
13723 false /* Don't emit DW_AT_name */,
13724 die);
13725 return die;
13726 }
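/* As an illustration, for a C++ instantiation of
   "template <typename... T> struct S" such as S<int, long>, the expected
   output is a DW_TAG_GNU_template_parameter_pack DIE named "T" whose
   children are two DW_TAG_template_type_param DIEs, one referring to int
   and one to long, the children themselves carrying no DW_AT_name.  */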
13727
13728 /* Return the DBX register number described by a given RTL node. */
13729
13730 static unsigned int
13731 dbx_reg_number (const_rtx rtl)
13732 {
13733 unsigned regno = REGNO (rtl);
13734
13735 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13736
13737 #ifdef LEAF_REG_REMAP
13738 if (crtl->uses_only_leaf_regs)
13739 {
13740 int leaf_reg = LEAF_REG_REMAP (regno);
13741 if (leaf_reg != -1)
13742 regno = (unsigned) leaf_reg;
13743 }
13744 #endif
13745
13746 regno = DBX_REGISTER_NUMBER (regno);
13747 gcc_assert (regno != INVALID_REGNUM);
13748 return regno;
13749 }
13750
13751 /* Optionally add a DW_OP_piece term to a location description expression.
13752 DW_OP_piece is only added if the location description expression doesn't
13753 already end with DW_OP_piece. */
13754
13755 static void
13756 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13757 {
13758 dw_loc_descr_ref loc;
13759
13760 if (*list_head != NULL)
13761 {
13762 /* Find the end of the chain. */
13763 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13764 ;
13765
13766 if (loc->dw_loc_opc != DW_OP_piece)
13767 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13768 }
13769 }
13770
13771 /* Return a location descriptor that designates a machine register or
13772 zero if there is none. */
13773
13774 static dw_loc_descr_ref
13775 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13776 {
13777 rtx regs;
13778
13779 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13780 return 0;
13781
13782 /* We only use "frame base" when we're sure we're talking about the
13783 post-prologue local stack frame. We do this by *not* running
13784 register elimination until this point, and recognizing the special
13785 argument pointer and soft frame pointer rtx's.
13786 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13787 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13788 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13789 {
13790 dw_loc_descr_ref result = NULL;
13791
13792 if (dwarf_version >= 4 || !dwarf_strict)
13793 {
13794 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13795 initialized);
13796 if (result)
13797 add_loc_descr (&result,
13798 new_loc_descr (DW_OP_stack_value, 0, 0));
13799 }
13800 return result;
13801 }
13802
13803 regs = targetm.dwarf_register_span (rtl);
13804
13805 if (REG_NREGS (rtl) > 1 || regs)
13806 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13807 else
13808 {
13809 unsigned int dbx_regnum = dbx_reg_number (rtl);
13810 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13811 return 0;
13812 return one_reg_loc_descriptor (dbx_regnum, initialized);
13813 }
13814 }
13815
13816 /* Return a location descriptor that designates a machine register for
13817 a given hard register number. */
13818
13819 static dw_loc_descr_ref
13820 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13821 {
13822 dw_loc_descr_ref reg_loc_descr;
13823
13824 if (regno <= 31)
13825 reg_loc_descr
13826 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13827 else
13828 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13829
13830 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13831 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13832
13833 return reg_loc_descr;
13834 }
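/* E.g. DWARF register number 3 becomes the one-byte DW_OP_reg3, while
   register number 90 becomes DW_OP_regx 90, i.e. the opcode followed by a
   ULEB128 operand.  */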
13835
13836 /* Given an RTL of a register, return a location descriptor that
13837 designates a value that spans more than one register. */
13838
13839 static dw_loc_descr_ref
13840 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13841 enum var_init_status initialized)
13842 {
13843 int size, i;
13844 dw_loc_descr_ref loc_result = NULL;
13845
13846 /* Simple, contiguous registers. */
13847 if (regs == NULL_RTX)
13848 {
13849 unsigned reg = REGNO (rtl);
13850 int nregs;
13851
13852 #ifdef LEAF_REG_REMAP
13853 if (crtl->uses_only_leaf_regs)
13854 {
13855 int leaf_reg = LEAF_REG_REMAP (reg);
13856 if (leaf_reg != -1)
13857 reg = (unsigned) leaf_reg;
13858 }
13859 #endif
13860
13861 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13862 nregs = REG_NREGS (rtl);
13863
13864 /* At present we only track constant-sized pieces. */
13865 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13866 return NULL;
13867 size /= nregs;
13868
13869 loc_result = NULL;
13870 while (nregs--)
13871 {
13872 dw_loc_descr_ref t;
13873
13874 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13875 VAR_INIT_STATUS_INITIALIZED);
13876 add_loc_descr (&loc_result, t);
13877 add_loc_descr_op_piece (&loc_result, size);
13878 ++reg;
13879 }
13880 return loc_result;
13881 }
13882
13883 /* Now onto stupid register sets in non contiguous locations. */
13884
13885 gcc_assert (GET_CODE (regs) == PARALLEL);
13886
13887 /* At present we only track constant-sized pieces. */
13888 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13889 return NULL;
13890 loc_result = NULL;
13891
13892 for (i = 0; i < XVECLEN (regs, 0); ++i)
13893 {
13894 dw_loc_descr_ref t;
13895
13896 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13897 VAR_INIT_STATUS_INITIALIZED);
13898 add_loc_descr (&loc_result, t);
13899 add_loc_descr_op_piece (&loc_result, size);
13900 }
13901
13902 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13903 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13904 return loc_result;
13905 }
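/* For instance, a 16-byte value spread over two adjacent 8-byte hard
   registers with DWARF numbers 4 and 5 would come out as
   DW_OP_reg4 DW_OP_piece 8 DW_OP_reg5 DW_OP_piece 8 (register numbers
   picked purely for illustration).  */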
13906
13907 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13908
13909 /* Return a location descriptor that designates a constant i,
13910 as a compound operation from constant (i >> shift), constant shift
13911 and DW_OP_shl. */
13912
13913 static dw_loc_descr_ref
13914 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13915 {
13916 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13917 add_loc_descr (&ret, int_loc_descriptor (shift));
13918 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13919 return ret;
13920 }
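/* For example, int_shift_loc_descriptor (0x40000000, 30) produces
   DW_OP_lit1 DW_OP_lit30 DW_OP_shl, a 3-byte encoding of 1 << 30, versus
   the 5 bytes a DW_OP_const4u would take.  */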
13921
13922 /* Return a location descriptor that designates constant POLY_I. */
13923
13924 static dw_loc_descr_ref
13925 int_loc_descriptor (poly_int64 poly_i)
13926 {
13927 enum dwarf_location_atom op;
13928
13929 HOST_WIDE_INT i;
13930 if (!poly_i.is_constant (&i))
13931 {
13932 /* Create location descriptions for the non-constant part and
13933 add any constant offset at the end. */
13934 dw_loc_descr_ref ret = NULL;
13935 HOST_WIDE_INT constant = poly_i.coeffs[0];
13936 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13937 {
13938 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13939 if (coeff != 0)
13940 {
13941 dw_loc_descr_ref start = ret;
13942 unsigned int factor;
13943 int bias;
13944 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13945 (j, &factor, &bias);
13946
13947 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13948 add COEFF * (REGNO / FACTOR) now and subtract
13949 COEFF * BIAS from the final constant part. */
13950 constant -= coeff * bias;
13951 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13952 if (coeff % factor == 0)
13953 coeff /= factor;
13954 else
13955 {
13956 int amount = exact_log2 (factor);
13957 gcc_assert (amount >= 0);
13958 add_loc_descr (&ret, int_loc_descriptor (amount));
13959 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13960 }
13961 if (coeff != 1)
13962 {
13963 add_loc_descr (&ret, int_loc_descriptor (coeff));
13964 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13965 }
13966 if (start)
13967 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13968 }
13969 }
13970 loc_descr_plus_const (&ret, constant);
13971 return ret;
13972 }
13973
13974 /* Pick the smallest representation of a constant, rather than just
13975 defaulting to the LEB encoding. */
13976 if (i >= 0)
13977 {
13978 int clz = clz_hwi (i);
13979 int ctz = ctz_hwi (i);
13980 if (i <= 31)
13981 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13982 else if (i <= 0xff)
13983 op = DW_OP_const1u;
13984 else if (i <= 0xffff)
13985 op = DW_OP_const2u;
13986 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13987 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13988 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13989 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13990 while DW_OP_const4u is 5 bytes. */
13991 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13992 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13993 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13994 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13995 while DW_OP_const4u is 5 bytes. */
13996 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13997
13998 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13999 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14000 <= 4)
14001 {
14002 /* As i >= 2**31, the double cast above will yield a negative number.
14003 Since wrapping is defined in DWARF expressions we can output big
14004 positive integers as small negative ones, regardless of the size
14005 of host wide ints.
14006
14007 Here, since the evaluator will handle 32-bit values and since i >=
14008 2**31, we know it's going to be interpreted as a negative literal:
14009 store it this way when that beats the 5 bytes of DW_OP_const4u. */
14010 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14011 }
14012 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14013 op = DW_OP_const4u;
14014
14015 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
14016 least 6 bytes: see if we can do better before falling back to it. */
14017 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14018 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14019 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
14020 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
14021 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14022 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
14023 >= HOST_BITS_PER_WIDE_INT)
14024 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
14025 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
14026 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
14027 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14028 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14029 && size_of_uleb128 (i) > 6)
14030 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
14031 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
14032 else
14033 op = DW_OP_constu;
14034 }
14035 else
14036 {
14037 if (i >= -0x80)
14038 op = DW_OP_const1s;
14039 else if (i >= -0x8000)
14040 op = DW_OP_const2s;
14041 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14042 {
14043 if (size_of_int_loc_descriptor (i) < 5)
14044 {
14045 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14046 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14047 return ret;
14048 }
14049 op = DW_OP_const4s;
14050 }
14051 else
14052 {
14053 if (size_of_int_loc_descriptor (i)
14054 < (unsigned long) 1 + size_of_sleb128 (i))
14055 {
14056 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14057 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14058 return ret;
14059 }
14060 op = DW_OP_consts;
14061 }
14062 }
14063
14064 return new_loc_descr (op, i, 0);
14065 }
14066
14067 /* Likewise, for unsigned constants. */
14068
14069 static dw_loc_descr_ref
14070 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
14071 {
14072 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
14073 const unsigned HOST_WIDE_INT max_uint
14074 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
14075
14076 /* If possible, use the clever signed constants handling. */
14077 if (i <= max_int)
14078 return int_loc_descriptor ((HOST_WIDE_INT) i);
14079
14080 /* Here, we are left with positive numbers that cannot be represented as
14081 HOST_WIDE_INT, i.e.:
14082 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
14083
14084 Using a DW_OP_const4u/8u operation to encode them consumes a lot of bytes,
14085 whereas it may be better to output a negative integer: thanks to integer
14086 wrapping, we know that:
14087 x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
14088 = x - 2 * (max (HOST_WIDE_INT) + 1)
14089 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
14090 small negative integers. Let's try that in cases where it will clearly
14091 improve the encoding: there is no gain turning DW_OP_const4u into
14092 DW_OP_const4s. */
14093 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
14094 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
14095 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
14096 {
14097 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
14098
14099 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14100 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14101 const HOST_WIDE_INT second_shift
14102 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14103
14104 /* So we finally have:
14105 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14106 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14107 return int_loc_descriptor (second_shift);
14108 }
14109
14110 /* Last chance: fallback to a simple constant operation. */
14111 return new_loc_descr
14112 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14113 ? DW_OP_const4u
14114 : DW_OP_const8u,
14115 i, 0);
14116 }
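/* A sketch of the wrapping trick, assuming DWARF2_ADDR_SIZE == 8 and a
   64-bit HOST_WIDE_INT: i == 0xffffffffffffffff falls in the range handled
   above and becomes int_loc_descriptor (-1), i.e. a 2-byte DW_OP_const1s,
   instead of a 9-byte DW_OP_const8u.  */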
14117
14118 /* Generate and return a location description that computes the unsigned
14119 comparison of the two stack top entries (a OP b where b is the top-most
14120 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14121 LE_EXPR, GT_EXPR or GE_EXPR. */
14122
14123 static dw_loc_descr_ref
14124 uint_comparison_loc_list (enum tree_code kind)
14125 {
14126 enum dwarf_location_atom op, flip_op;
14127 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14128
14129 switch (kind)
14130 {
14131 case LT_EXPR:
14132 op = DW_OP_lt;
14133 break;
14134 case LE_EXPR:
14135 op = DW_OP_le;
14136 break;
14137 case GT_EXPR:
14138 op = DW_OP_gt;
14139 break;
14140 case GE_EXPR:
14141 op = DW_OP_ge;
14142 break;
14143 default:
14144 gcc_unreachable ();
14145 }
14146
14147 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14148 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14149
14150 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14151 possible to perform unsigned comparisons: we just have to distinguish
14152 two cases:
14153
14154 1. when a and b have the same sign (as signed integers), we should simply
14155 return a OP(signed) b;
14156
14157 2. when a is a negative signed integer while b is a positive one, a is
14158 the greater unsigned integer; likewise when a's and b's roles are
14159 flipped.
14160
14161 So first, compare the signs of the two operands. */
14162 ret = new_loc_descr (DW_OP_over, 0, 0);
14163 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14164 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14165 /* If they have different signs (i.e. their sign bits differ), then the
14166 stack top value now has its sign bit set and is thus smaller than
14167 zero. */
14168 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14169 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14170 add_loc_descr (&ret, bra_node);
14171
14172 /* We are in case 1. At this point, we know both operands have the same
14173 sign, so it's safe to use the built-in signed comparison. */
14174 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14175 add_loc_descr (&ret, jmp_node);
14176
14177 /* We are in case 2. Here, we know both operands do not have the same sign,
14178 so we have to flip the signed comparison. */
14179 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14180 tmp = new_loc_descr (flip_op, 0, 0);
14181 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14182 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14183 add_loc_descr (&ret, tmp);
14184
14185 /* This dummy operation is necessary to make the two branches join. */
14186 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14187 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14188 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14189 add_loc_descr (&ret, tmp);
14190
14191 return ret;
14192 }
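
/* For instance, for KIND == LT_EXPR the code above emits (labels are
   informal):
     DW_OP_over DW_OP_over DW_OP_xor DW_OP_lit0 DW_OP_lt DW_OP_bra <L1>
     DW_OP_lt DW_OP_skip <L2>  L1: DW_OP_gt  L2: DW_OP_nop  */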
14193
14194 /* Likewise, but takes the location description lists (might be destructive on
14195 them). Return NULL if either is NULL or if concatenation fails. */
14196
14197 static dw_loc_list_ref
14198 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14199 enum tree_code kind)
14200 {
14201 if (left == NULL || right == NULL)
14202 return NULL;
14203
14204 add_loc_list (&left, right);
14205 if (left == NULL)
14206 return NULL;
14207
14208 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14209 return left;
14210 }
14211
14212 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14213 without actually allocating it. */
14214
14215 static unsigned long
14216 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14217 {
14218 return size_of_int_loc_descriptor (i >> shift)
14219 + size_of_int_loc_descriptor (shift)
14220 + 1;
14221 }
14222
14223 /* Return size_of_locs (int_loc_descriptor (i)) without
14224 actually allocating it. */
14225
14226 static unsigned long
14227 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14228 {
14229 unsigned long s;
14230
14231 if (i >= 0)
14232 {
14233 int clz, ctz;
14234 if (i <= 31)
14235 return 1;
14236 else if (i <= 0xff)
14237 return 2;
14238 else if (i <= 0xffff)
14239 return 3;
14240 clz = clz_hwi (i);
14241 ctz = ctz_hwi (i);
14242 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14243 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14244 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14245 - clz - 5);
14246 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14247 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14248 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14249 - clz - 8);
14250 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14251 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14252 <= 4)
14253 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14254 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14255 return 5;
14256 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14257 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14258 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14259 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14260 - clz - 8);
14261 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14262 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14263 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14264 - clz - 16);
14265 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14266 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14267 && s > 6)
14268 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14269 - clz - 32);
14270 else
14271 return 1 + s;
14272 }
14273 else
14274 {
14275 if (i >= -0x80)
14276 return 2;
14277 else if (i >= -0x8000)
14278 return 3;
14279 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14280 {
14281 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14282 {
14283 s = size_of_int_loc_descriptor (-i) + 1;
14284 if (s < 5)
14285 return s;
14286 }
14287 return 5;
14288 }
14289 else
14290 {
14291 unsigned long r = 1 + size_of_sleb128 (i);
14292 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14293 {
14294 s = size_of_int_loc_descriptor (-i) + 1;
14295 if (s < r)
14296 return s;
14297 }
14298 return r;
14299 }
14300 }
14301 }
14302
14303 /* Return a loc description representing the "address" of an integer value.
14304 This can appear only as a top-level expression. */
14305
14306 static dw_loc_descr_ref
14307 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14308 {
14309 int litsize;
14310 dw_loc_descr_ref loc_result = NULL;
14311
14312 if (!(dwarf_version >= 4 || !dwarf_strict))
14313 return NULL;
14314
14315 litsize = size_of_int_loc_descriptor (i);
14316 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14317 is more compact. For DW_OP_stack_value we need:
14318 litsize + 1 (DW_OP_stack_value)
14319 and for DW_OP_implicit_value:
14320 1 (DW_OP_implicit_value) + 1 (length) + size. */
14321 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14322 {
14323 loc_result = int_loc_descriptor (i);
14324 add_loc_descr (&loc_result,
14325 new_loc_descr (DW_OP_stack_value, 0, 0));
14326 return loc_result;
14327 }
14328
14329 loc_result = new_loc_descr (DW_OP_implicit_value,
14330 size, 0);
14331 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14332 loc_result->dw_loc_oprnd2.v.val_int = i;
14333 return loc_result;
14334 }
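
/* A size-comparison example (illustrative, assuming DWARF2_ADDR_SIZE == 8):
   for SIZE == 4 and I == 5, litsize is 1 (DW_OP_lit5), so the function
   returns DW_OP_lit5 DW_OP_stack_value (2 bytes) rather than
   DW_OP_implicit_value 4 <4 bytes> (6 bytes); for SIZE == 16 the first
   test fails and DW_OP_implicit_value is used instead.  */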
14335
14336 /* Return a location descriptor that designates a base+offset location. */
14337
14338 static dw_loc_descr_ref
14339 based_loc_descr (rtx reg, poly_int64 offset,
14340 enum var_init_status initialized)
14341 {
14342 unsigned int regno;
14343 dw_loc_descr_ref result;
14344 dw_fde_ref fde = cfun->fde;
14345
14346 /* We only use "frame base" when we're sure we're talking about the
14347 post-prologue local stack frame. We do this by *not* running
14348 register elimination until this point, and recognizing the special
14349 argument pointer and soft frame pointer rtx's. */
14350 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14351 {
14352 rtx elim = (ira_use_lra_p
14353 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14354 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14355
14356 if (elim != reg)
14357 {
14358 /* Allow hard frame pointer here even if frame pointer
14359 isn't used since hard frame pointer is encoded with
14360 DW_OP_fbreg which uses the DW_AT_frame_base attribute,
14361 not hard frame pointer directly. */
14362 elim = strip_offset_and_add (elim, &offset);
14363 gcc_assert (elim == hard_frame_pointer_rtx
14364 || elim == stack_pointer_rtx);
14365
14366 /* If drap register is used to align stack, use frame
14367 pointer + offset to access stack variables. If stack
14368 is aligned without drap, use stack pointer + offset to
14369 access stack variables. */
14370 if (crtl->stack_realign_tried
14371 && reg == frame_pointer_rtx)
14372 {
14373 int base_reg
14374 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14375 ? HARD_FRAME_POINTER_REGNUM
14376 : REGNO (elim));
14377 return new_reg_loc_descr (base_reg, offset);
14378 }
14379
14380 gcc_assert (frame_pointer_fb_offset_valid);
14381 offset += frame_pointer_fb_offset;
14382 HOST_WIDE_INT const_offset;
14383 if (offset.is_constant (&const_offset))
14384 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14385 else
14386 {
14387 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14388 loc_descr_plus_const (&ret, offset);
14389 return ret;
14390 }
14391 }
14392 }
14393
14394 regno = REGNO (reg);
14395 #ifdef LEAF_REG_REMAP
14396 if (crtl->uses_only_leaf_regs)
14397 {
14398 int leaf_reg = LEAF_REG_REMAP (regno);
14399 if (leaf_reg != -1)
14400 regno = (unsigned) leaf_reg;
14401 }
14402 #endif
14403 regno = DWARF_FRAME_REGNUM (regno);
14404
14405 HOST_WIDE_INT const_offset;
14406 if (!optimize && fde
14407 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14408 && offset.is_constant (&const_offset))
14409 {
14410 /* Use cfa+offset to represent the location of arguments passed
14411 on the stack when drap is used to align the stack.
14412 Only do this when not optimizing: for optimized code var-tracking
14413 is supposed to track where the arguments live, and the register
14414 used as vdrap or drap in some spot might be used for something
14415 else in another part of the routine. */
14416 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14417 }
14418
14419 result = new_reg_loc_descr (regno, offset);
14420
14421 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14422 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14423
14424 return result;
14425 }
14426
14427 /* Return true if this RTL expression describes a base+offset calculation. */
14428
14429 static inline int
14430 is_based_loc (const_rtx rtl)
14431 {
14432 return (GET_CODE (rtl) == PLUS
14433 && ((REG_P (XEXP (rtl, 0))
14434 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14435 && CONST_INT_P (XEXP (rtl, 1)))));
14436 }
14437
14438 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14439 failed. */
14440
14441 static dw_loc_descr_ref
14442 tls_mem_loc_descriptor (rtx mem)
14443 {
14444 tree base;
14445 dw_loc_descr_ref loc_result;
14446
14447 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14448 return NULL;
14449
14450 base = get_base_address (MEM_EXPR (mem));
14451 if (base == NULL
14452 || !VAR_P (base)
14453 || !DECL_THREAD_LOCAL_P (base))
14454 return NULL;
14455
14456 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14457 if (loc_result == NULL)
14458 return NULL;
14459
14460 if (maybe_ne (MEM_OFFSET (mem), 0))
14461 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14462
14463 return loc_result;
14464 }
14465
14466 /* Output debug info about the reason why we failed to expand an expression
14467 as a dwarf expression. */
14468
14469 static void
14470 expansion_failed (tree expr, rtx rtl, char const *reason)
14471 {
14472 if (dump_file && (dump_flags & TDF_DETAILS))
14473 {
14474 fprintf (dump_file, "Failed to expand as dwarf: ");
14475 if (expr)
14476 print_generic_expr (dump_file, expr, dump_flags);
14477 if (rtl)
14478 {
14479 fprintf (dump_file, "\n");
14480 print_rtl (dump_file, rtl);
14481 }
14482 fprintf (dump_file, "\nReason: %s\n", reason);
14483 }
14484 }
14485
14486 /* Helper function for const_ok_for_output. */
14487
14488 static bool
14489 const_ok_for_output_1 (rtx rtl)
14490 {
14491 if (targetm.const_not_ok_for_debug_p (rtl))
14492 {
14493 if (GET_CODE (rtl) != UNSPEC)
14494 {
14495 expansion_failed (NULL_TREE, rtl,
14496 "Expression rejected for debug by the backend.\n");
14497 return false;
14498 }
14499
14500 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14501 the target hook doesn't explicitly allow it in debug info, assume
14502 we can't express it in the debug info. */
14503 /* Don't complain about TLS UNSPECs, those are just too hard to
14504 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14505 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14506 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14507 if (flag_checking
14508 && (XVECLEN (rtl, 0) == 0
14509 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14510 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14511 inform (current_function_decl
14512 ? DECL_SOURCE_LOCATION (current_function_decl)
14513 : UNKNOWN_LOCATION,
14514 #if NUM_UNSPEC_VALUES > 0
14515 "non-delegitimized UNSPEC %s (%d) found in variable location",
14516 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14517 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14518 #else
14519 "non-delegitimized UNSPEC %d found in variable location",
14520 #endif
14521 XINT (rtl, 1));
14522 expansion_failed (NULL_TREE, rtl,
14523 "UNSPEC hasn't been delegitimized.\n");
14524 return false;
14525 }
14526
14527 if (CONST_POLY_INT_P (rtl))
14528 return false;
14529
14530 /* FIXME: Refer to PR60655. It is possible for simplification
14531 of rtl expressions in var tracking to produce such expressions.
14532 We should really identify / validate expressions
14533 enclosed in CONST that can be handled by assemblers on various
14534 targets and only handle legitimate cases here. */
14535 switch (GET_CODE (rtl))
14536 {
14537 case SYMBOL_REF:
14538 break;
14539 case NOT:
14540 case NEG:
14541 return false;
14542 case PLUS:
14543 {
14544 /* Make sure SYMBOL_REFs/UNSPECs are at most in one of the
14545 operands. */
14546 subrtx_var_iterator::array_type array;
14547 bool first = false;
14548 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14549 if (SYMBOL_REF_P (*iter)
14550 || LABEL_P (*iter)
14551 || GET_CODE (*iter) == UNSPEC)
14552 {
14553 first = true;
14554 break;
14555 }
14556 if (!first)
14557 return true;
14558 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
14559 if (SYMBOL_REF_P (*iter)
14560 || LABEL_P (*iter)
14561 || GET_CODE (*iter) == UNSPEC)
14562 return false;
14563 return true;
14564 }
14565 case MINUS:
14566 {
14567 /* Disallow negation of SYMBOL_REFs or UNSPECs when they
14568 appear in the second operand of MINUS. */
14569 subrtx_var_iterator::array_type array;
14570 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
14571 if (SYMBOL_REF_P (*iter)
14572 || LABEL_P (*iter)
14573 || GET_CODE (*iter) == UNSPEC)
14574 return false;
14575 return true;
14576 }
14577 default:
14578 return true;
14579 }
14580
14581 if (CONSTANT_POOL_ADDRESS_P (rtl))
14582 {
14583 bool marked;
14584 get_pool_constant_mark (rtl, &marked);
14585 /* If all references to this pool constant were optimized away,
14586 it was not output and thus we can't represent it. */
14587 if (!marked)
14588 {
14589 expansion_failed (NULL_TREE, rtl,
14590 "Constant was removed from constant pool.\n");
14591 return false;
14592 }
14593 }
14594
14595 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14596 return false;
14597
14598 /* Avoid references to external symbols in debug info: on several targets
14599 the linker might even refuse to link when linking a shared library,
14600 and in many other cases the relocations for .debug_info/.debug_loc are
14601 dropped, so the address becomes zero anyway. Hidden symbols, guaranteed
14602 to be defined within the same shared library or executable, are fine. */
14603 if (SYMBOL_REF_EXTERNAL_P (rtl))
14604 {
14605 tree decl = SYMBOL_REF_DECL (rtl);
14606
14607 if (decl == NULL || !targetm.binds_local_p (decl))
14608 {
14609 expansion_failed (NULL_TREE, rtl,
14610 "Symbol not defined in current TU.\n");
14611 return false;
14612 }
14613 }
14614
14615 return true;
14616 }
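
/* Illustrative consequences of the rules above ("x" stands for any symbol):
   a CONST like (plus (symbol_ref "x") (const_int 4)) is accepted, while a
   PLUS with SYMBOL_REFs/UNSPECs in both operands, a MINUS with a SYMBOL_REF
   in its second operand, or a toplevel NOT/NEG is rejected, as are TLS
   SYMBOL_REFs, optimized-away constant-pool entries and external
   SYMBOL_REFs that do not bind locally.  */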
14617
14618 /* Return true if constant RTL can be emitted in DW_OP_addr or
14619 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14620 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14621
14622 static bool
14623 const_ok_for_output (rtx rtl)
14624 {
14625 if (GET_CODE (rtl) == SYMBOL_REF)
14626 return const_ok_for_output_1 (rtl);
14627
14628 if (GET_CODE (rtl) == CONST)
14629 {
14630 subrtx_var_iterator::array_type array;
14631 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14632 if (!const_ok_for_output_1 (*iter))
14633 return false;
14634 return true;
14635 }
14636
14637 return true;
14638 }
14639
14640 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14641 if possible, NULL otherwise. */
14642
14643 static dw_die_ref
14644 base_type_for_mode (machine_mode mode, bool unsignedp)
14645 {
14646 dw_die_ref type_die;
14647 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14648
14649 if (type == NULL)
14650 return NULL;
14651 switch (TREE_CODE (type))
14652 {
14653 case INTEGER_TYPE:
14654 case REAL_TYPE:
14655 break;
14656 default:
14657 return NULL;
14658 }
14659 type_die = lookup_type_die (type);
14660 if (!type_die)
14661 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14662 comp_unit_die ());
14663 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14664 return NULL;
14665 return type_die;
14666 }
14667
14668 /* For an OP descriptor assumed to be in unsigned MODE, convert it to an
14669 unsigned type matching MODE, or, if MODE is narrower than or as wide as
14670 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
14671 possible. */
14672
14673 static dw_loc_descr_ref
14674 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14675 {
14676 machine_mode outer_mode = mode;
14677 dw_die_ref type_die;
14678 dw_loc_descr_ref cvt;
14679
14680 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14681 {
14682 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14683 return op;
14684 }
14685 type_die = base_type_for_mode (outer_mode, 1);
14686 if (type_die == NULL)
14687 return NULL;
14688 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14689 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14690 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14691 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14692 add_loc_descr (&op, cvt);
14693 return op;
14694 }
14695
14696 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14697
14698 static dw_loc_descr_ref
14699 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14700 dw_loc_descr_ref op1)
14701 {
14702 dw_loc_descr_ref ret = op0;
14703 add_loc_descr (&ret, op1);
14704 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14705 if (STORE_FLAG_VALUE != 1)
14706 {
14707 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14708 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14709 }
14710 return ret;
14711 }
14712
14713 /* Subroutine of scompare_loc_descriptor for the case in which we're
14714 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14715 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14716
14717 static dw_loc_descr_ref
14718 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14719 scalar_int_mode op_mode,
14720 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14721 {
14722 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14723 dw_loc_descr_ref cvt;
14724
14725 if (type_die == NULL)
14726 return NULL;
14727 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14728 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14729 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14730 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14731 add_loc_descr (&op0, cvt);
14732 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14733 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14734 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14735 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14736 add_loc_descr (&op1, cvt);
14737 return compare_loc_descriptor (op, op0, op1);
14738 }
14739
14740 /* Subroutine of scompare_loc_descriptor for the case in which we're
14741 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14742 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14743
14744 static dw_loc_descr_ref
14745 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14746 scalar_int_mode op_mode,
14747 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14748 {
14749 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14750 /* For eq/ne, if the operands are known to be zero-extended,
14751 there is no need to do the fancy shifting up. */
14752 if (op == DW_OP_eq || op == DW_OP_ne)
14753 {
14754 dw_loc_descr_ref last0, last1;
14755 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14756 ;
14757 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14758 ;
14759 /* deref_size zero extends, and for constants we can check
14760 whether they are zero extended or not. */
14761 if (((last0->dw_loc_opc == DW_OP_deref_size
14762 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14763 || (CONST_INT_P (XEXP (rtl, 0))
14764 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14765 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14766 && ((last1->dw_loc_opc == DW_OP_deref_size
14767 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14768 || (CONST_INT_P (XEXP (rtl, 1))
14769 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14770 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14771 return compare_loc_descriptor (op, op0, op1);
14772
14773 /* EQ/NE comparison against constant in narrower type than
14774 DWARF2_ADDR_SIZE can be performed either as
14775 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14776 DW_OP_{eq,ne}
14777 or
14778 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14779 DW_OP_{eq,ne}. Pick whatever is shorter. */
14780 if (CONST_INT_P (XEXP (rtl, 1))
14781 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14782 && (size_of_int_loc_descriptor (shift) + 1
14783 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14784 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14785 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14786 & GET_MODE_MASK (op_mode))))
14787 {
14788 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14789 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14790 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14791 & GET_MODE_MASK (op_mode));
14792 return compare_loc_descriptor (op, op0, op1);
14793 }
14794 }
14795 add_loc_descr (&op0, int_loc_descriptor (shift));
14796 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14797 if (CONST_INT_P (XEXP (rtl, 1)))
14798 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14799 else
14800 {
14801 add_loc_descr (&op1, int_loc_descriptor (shift));
14802 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14803 }
14804 return compare_loc_descriptor (op, op0, op1);
14805 }
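
/* For example (illustrative, assuming DWARF2_ADDR_SIZE == 8): comparing two
   HImode operands gives shift == 48, so each operand is followed by
   DW_OP_const1u 48 DW_OP_shl (or the constant operand is pre-shifted),
   moving the 16-bit values into the top bits where the full-width signed
   comparison orders them the same way the narrow signed comparison
   would.  */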
14806
14807 /* Return location descriptor for signed comparison OP RTL. */
14808
14809 static dw_loc_descr_ref
14810 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14811 machine_mode mem_mode)
14812 {
14813 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14814 dw_loc_descr_ref op0, op1;
14815
14816 if (op_mode == VOIDmode)
14817 op_mode = GET_MODE (XEXP (rtl, 1));
14818 if (op_mode == VOIDmode)
14819 return NULL;
14820
14821 scalar_int_mode int_op_mode;
14822 if (dwarf_strict
14823 && dwarf_version < 5
14824 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14825 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14826 return NULL;
14827
14828 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14829 VAR_INIT_STATUS_INITIALIZED);
14830 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14831 VAR_INIT_STATUS_INITIALIZED);
14832
14833 if (op0 == NULL || op1 == NULL)
14834 return NULL;
14835
14836 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14837 {
14838 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14839 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14840
14841 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14842 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14843 }
14844 return compare_loc_descriptor (op, op0, op1);
14845 }
14846
14847 /* Return location descriptor for unsigned comparison OP RTL. */
14848
14849 static dw_loc_descr_ref
14850 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14851 machine_mode mem_mode)
14852 {
14853 dw_loc_descr_ref op0, op1;
14854
14855 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14856 if (test_op_mode == VOIDmode)
14857 test_op_mode = GET_MODE (XEXP (rtl, 1));
14858
14859 scalar_int_mode op_mode;
14860 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14861 return NULL;
14862
14863 if (dwarf_strict
14864 && dwarf_version < 5
14865 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14866 return NULL;
14867
14868 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14869 VAR_INIT_STATUS_INITIALIZED);
14870 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14871 VAR_INIT_STATUS_INITIALIZED);
14872
14873 if (op0 == NULL || op1 == NULL)
14874 return NULL;
14875
14876 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14877 {
14878 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14879 dw_loc_descr_ref last0, last1;
14880 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14881 ;
14882 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14883 ;
14884 if (CONST_INT_P (XEXP (rtl, 0)))
14885 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14886 /* deref_size zero extends, so no need to mask it again. */
14887 else if (last0->dw_loc_opc != DW_OP_deref_size
14888 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14889 {
14890 add_loc_descr (&op0, int_loc_descriptor (mask));
14891 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14892 }
14893 if (CONST_INT_P (XEXP (rtl, 1)))
14894 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14895 /* deref_size zero extends, so no need to mask it again. */
14896 else if (last1->dw_loc_opc != DW_OP_deref_size
14897 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14898 {
14899 add_loc_descr (&op1, int_loc_descriptor (mask));
14900 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14901 }
14902 }
14903 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14904 {
14905 HOST_WIDE_INT bias = 1;
14906 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14907 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14908 if (CONST_INT_P (XEXP (rtl, 1)))
14909 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14910 + INTVAL (XEXP (rtl, 1)));
14911 else
14912 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14913 bias, 0));
14914 }
14915 return compare_loc_descriptor (op, op0, op1);
14916 }
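
/* The bias trick above relies on x <u y being equivalent to
   (x + 2 ** (N-1)) <s (y + 2 ** (N-1)) for N-bit operands.  Illustrative
   example for a 4-byte DWARF address size: bias == 0x80000000, so the
   unsigned operands 1 and 0xffffffff become 0x80000001 and 0x7fffffff,
   and the signed DW_OP_lt still reports the first as smaller.  */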
14917
14918 /* Return location descriptor for {U,S}{MIN,MAX}. */
14919
14920 static dw_loc_descr_ref
14921 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14922 machine_mode mem_mode)
14923 {
14924 enum dwarf_location_atom op;
14925 dw_loc_descr_ref op0, op1, ret;
14926 dw_loc_descr_ref bra_node, drop_node;
14927
14928 scalar_int_mode int_mode;
14929 if (dwarf_strict
14930 && dwarf_version < 5
14931 && (!is_a <scalar_int_mode> (mode, &int_mode)
14932 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14933 return NULL;
14934
14935 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14936 VAR_INIT_STATUS_INITIALIZED);
14937 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14938 VAR_INIT_STATUS_INITIALIZED);
14939
14940 if (op0 == NULL || op1 == NULL)
14941 return NULL;
14942
14943 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14944 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14945 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14946 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14947 {
14948 /* Checked by the caller. */
14949 int_mode = as_a <scalar_int_mode> (mode);
14950 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14951 {
14952 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14953 add_loc_descr (&op0, int_loc_descriptor (mask));
14954 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14955 add_loc_descr (&op1, int_loc_descriptor (mask));
14956 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14957 }
14958 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14959 {
14960 HOST_WIDE_INT bias = 1;
14961 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14962 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14963 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14964 }
14965 }
14966 else if (is_a <scalar_int_mode> (mode, &int_mode)
14967 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14968 {
14969 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14970 add_loc_descr (&op0, int_loc_descriptor (shift));
14971 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14972 add_loc_descr (&op1, int_loc_descriptor (shift));
14973 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14974 }
14975 else if (is_a <scalar_int_mode> (mode, &int_mode)
14976 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14977 {
14978 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14979 dw_loc_descr_ref cvt;
14980 if (type_die == NULL)
14981 return NULL;
14982 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14983 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14984 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14985 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14986 add_loc_descr (&op0, cvt);
14987 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14988 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14989 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14990 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14991 add_loc_descr (&op1, cvt);
14992 }
14993
14994 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14995 op = DW_OP_lt;
14996 else
14997 op = DW_OP_gt;
14998 ret = op0;
14999 add_loc_descr (&ret, op1);
15000 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
15001 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
15002 add_loc_descr (&ret, bra_node);
15003 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15004 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
15005 add_loc_descr (&ret, drop_node);
15006 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
15007 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
15008 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
15009 && is_a <scalar_int_mode> (mode, &int_mode)
15010 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15011 ret = convert_descriptor_to_mode (int_mode, ret);
15012 return ret;
15013 }
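
/* Sketch of the emitted sequence (illustrative): once both operands are
   pushed and adjusted, the stack holds a b a' b', where a'/b' are the
   copies prepared for comparison; DW_OP_lt (or DW_OP_gt) consumes a' b',
   and if the branch to DW_OP_drop is taken b is dropped leaving a,
   otherwise DW_OP_swap DW_OP_drop leaves b, i.e. the selected
   minimum/maximum.  */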
15014
15015 /* Helper function for mem_loc_descriptor. Perform the binary operation OP
15016 on the two operands of RTL after converting both to TYPE_DIE, then
15017 convert the result back to unsigned MODE (or untyped if MODE is not
wider than DWARF2_ADDR_SIZE). */
15018
15019 static dw_loc_descr_ref
15020 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
15021 scalar_int_mode mode, machine_mode mem_mode)
15022 {
15023 dw_loc_descr_ref cvt, op0, op1;
15024
15025 if (type_die == NULL)
15026 return NULL;
15027 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15028 VAR_INIT_STATUS_INITIALIZED);
15029 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15030 VAR_INIT_STATUS_INITIALIZED);
15031 if (op0 == NULL || op1 == NULL)
15032 return NULL;
15033 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15034 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15035 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15036 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15037 add_loc_descr (&op0, cvt);
15038 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15039 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15040 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15041 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15042 add_loc_descr (&op1, cvt);
15043 add_loc_descr (&op0, op1);
15044 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
15045 return convert_descriptor_to_mode (mode, op0);
15046 }
15047
15048 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
15049 const0 is DW_OP_lit0 or corresponding typed constant,
15050 const1 is DW_OP_lit1 or corresponding typed constant
15051 and constMSB is constant with just the MSB bit set
15052 for the mode):
15053 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
15054 L1: const0 DW_OP_swap
15055 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
15056 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15057 L3: DW_OP_drop
15058 L4: DW_OP_nop
15059
15060 CTZ is similar:
15061 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
15062 L1: const0 DW_OP_swap
15063 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15064 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15065 L3: DW_OP_drop
15066 L4: DW_OP_nop
15067
15068 FFS is similar:
15069 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
15070 L1: const1 DW_OP_swap
15071 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15072 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15073 L3: DW_OP_drop
15074 L4: DW_OP_nop */
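
/* A small worked example of the CLZ loop (illustrative, 8-bit mode,
   constMSB == 0x80): for the input 0x10 the value is nonzero, the counter
   starts at const0, and the loop shifts 0x10 -> 0x20 -> 0x40 -> 0x80,
   bumping the counter once per shift, so it exits at L3 with 3 on the
   stack, which is indeed the count of leading zeros of 0x10 in 8 bits.  */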
15075
15076 static dw_loc_descr_ref
15077 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
15078 machine_mode mem_mode)
15079 {
15080 dw_loc_descr_ref op0, ret, tmp;
15081 HOST_WIDE_INT valv;
15082 dw_loc_descr_ref l1jump, l1label;
15083 dw_loc_descr_ref l2jump, l2label;
15084 dw_loc_descr_ref l3jump, l3label;
15085 dw_loc_descr_ref l4jump, l4label;
15086 rtx msb;
15087
15088 if (GET_MODE (XEXP (rtl, 0)) != mode)
15089 return NULL;
15090
15091 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15092 VAR_INIT_STATUS_INITIALIZED);
15093 if (op0 == NULL)
15094 return NULL;
15095 ret = op0;
15096 if (GET_CODE (rtl) == CLZ)
15097 {
15098 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15099 valv = GET_MODE_BITSIZE (mode);
15100 }
15101 else if (GET_CODE (rtl) == FFS)
15102 valv = 0;
15103 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15104 valv = GET_MODE_BITSIZE (mode);
15105 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15106 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
15107 add_loc_descr (&ret, l1jump);
15108 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15109 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
15110 VAR_INIT_STATUS_INITIALIZED);
15111 if (tmp == NULL)
15112 return NULL;
15113 add_loc_descr (&ret, tmp);
15114 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
15115 add_loc_descr (&ret, l4jump);
15116 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
15117 ? const1_rtx : const0_rtx,
15118 mode, mem_mode,
15119 VAR_INIT_STATUS_INITIALIZED);
15120 if (l1label == NULL)
15121 return NULL;
15122 add_loc_descr (&ret, l1label);
15123 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15124 l2label = new_loc_descr (DW_OP_dup, 0, 0);
15125 add_loc_descr (&ret, l2label);
15126 if (GET_CODE (rtl) != CLZ)
15127 msb = const1_rtx;
15128 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15129 msb = GEN_INT (HOST_WIDE_INT_1U
15130 << (GET_MODE_BITSIZE (mode) - 1));
15131 else
15132 msb = immed_wide_int_const
15133 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15134 GET_MODE_PRECISION (mode)), mode);
15135 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15136 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15137 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15138 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15139 else
15140 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15141 VAR_INIT_STATUS_INITIALIZED);
15142 if (tmp == NULL)
15143 return NULL;
15144 add_loc_descr (&ret, tmp);
15145 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15146 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15147 add_loc_descr (&ret, l3jump);
15148 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15149 VAR_INIT_STATUS_INITIALIZED);
15150 if (tmp == NULL)
15151 return NULL;
15152 add_loc_descr (&ret, tmp);
15153 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15154 ? DW_OP_shl : DW_OP_shr, 0, 0));
15155 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15156 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15157 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15158 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15159 add_loc_descr (&ret, l2jump);
15160 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15161 add_loc_descr (&ret, l3label);
15162 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15163 add_loc_descr (&ret, l4label);
15164 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15165 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15166 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15167 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15168 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15169 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15170 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15171 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15172 return ret;
15173 }
15174
15175 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15176 const1 is DW_OP_lit1 or corresponding typed constant):
15177 const0 DW_OP_swap
15178 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15179 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15180 L2: DW_OP_drop
15181
15182 PARITY is similar:
15183 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15184 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15185 L2: DW_OP_drop */
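
/* Worked example (illustrative, 8-bit operand 0x0b == 0b1011): the loop
   peels off the low bit while shifting right, visiting bits 1, 1, 0, 1, so
   POPCOUNT accumulates 3 via DW_OP_plus and PARITY accumulates 1 via
   DW_OP_xor.  */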
15186
15187 static dw_loc_descr_ref
15188 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15189 machine_mode mem_mode)
15190 {
15191 dw_loc_descr_ref op0, ret, tmp;
15192 dw_loc_descr_ref l1jump, l1label;
15193 dw_loc_descr_ref l2jump, l2label;
15194
15195 if (GET_MODE (XEXP (rtl, 0)) != mode)
15196 return NULL;
15197
15198 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15199 VAR_INIT_STATUS_INITIALIZED);
15200 if (op0 == NULL)
15201 return NULL;
15202 ret = op0;
15203 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15204 VAR_INIT_STATUS_INITIALIZED);
15205 if (tmp == NULL)
15206 return NULL;
15207 add_loc_descr (&ret, tmp);
15208 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15209 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15210 add_loc_descr (&ret, l1label);
15211 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15212 add_loc_descr (&ret, l2jump);
15213 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15214 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15215 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15216 VAR_INIT_STATUS_INITIALIZED);
15217 if (tmp == NULL)
15218 return NULL;
15219 add_loc_descr (&ret, tmp);
15220 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15221 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15222 ? DW_OP_plus : DW_OP_xor, 0, 0));
15223 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15224 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15225 VAR_INIT_STATUS_INITIALIZED);
15226 add_loc_descr (&ret, tmp);
15227 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15228 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15229 add_loc_descr (&ret, l1jump);
15230 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15231 add_loc_descr (&ret, l2label);
15232 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15233 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15234 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15235 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15236 return ret;
15237 }
15238
15239 /* BSWAP (constS is initial shift count, either 56 or 24):
15240 constS const0
15241 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15242 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15243 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15244 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15245 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
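
/* Worked example (illustrative, 32-bit operand 0x11223344, constS == 24):
   each iteration extracts the byte (value >> (24 - S)) & 0xff and ORs it
   into the accumulator shifted left by S, for S = 24, 16, 8, 0, producing
   0x44332211 before the final clean-up drops the shift counter and the
   original value.  */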
15246
15247 static dw_loc_descr_ref
15248 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15249 machine_mode mem_mode)
15250 {
15251 dw_loc_descr_ref op0, ret, tmp;
15252 dw_loc_descr_ref l1jump, l1label;
15253 dw_loc_descr_ref l2jump, l2label;
15254
15255 if (BITS_PER_UNIT != 8
15256 || (GET_MODE_BITSIZE (mode) != 32
15257 && GET_MODE_BITSIZE (mode) != 64))
15258 return NULL;
15259
15260 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15261 VAR_INIT_STATUS_INITIALIZED);
15262 if (op0 == NULL)
15263 return NULL;
15264
15265 ret = op0;
15266 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15267 mode, mem_mode,
15268 VAR_INIT_STATUS_INITIALIZED);
15269 if (tmp == NULL)
15270 return NULL;
15271 add_loc_descr (&ret, tmp);
15272 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15273 VAR_INIT_STATUS_INITIALIZED);
15274 if (tmp == NULL)
15275 return NULL;
15276 add_loc_descr (&ret, tmp);
15277 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15278 add_loc_descr (&ret, l1label);
15279 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15280 mode, mem_mode,
15281 VAR_INIT_STATUS_INITIALIZED);
15282 add_loc_descr (&ret, tmp);
15283 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15284 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15285 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15286 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15287 VAR_INIT_STATUS_INITIALIZED);
15288 if (tmp == NULL)
15289 return NULL;
15290 add_loc_descr (&ret, tmp);
15291 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15292 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15293 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15294 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15295 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15296 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15297 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15298 VAR_INIT_STATUS_INITIALIZED);
15299 add_loc_descr (&ret, tmp);
15300 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15301 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15302 add_loc_descr (&ret, l2jump);
15303 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15304 VAR_INIT_STATUS_INITIALIZED);
15305 add_loc_descr (&ret, tmp);
15306 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15307 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15308 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15309 add_loc_descr (&ret, l1jump);
15310 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15311 add_loc_descr (&ret, l2label);
15312 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15313 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15314 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15315 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15316 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15317 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15318 return ret;
15319 }
15320
15321 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15322 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15323 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15324 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15325
15326 ROTATERT is similar:
15327 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15328 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15329 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
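
/* Worked example (illustrative, 8-bit operand 0x96 rotated left by 3):
   the left part is (0x96 << 3) & 0xff == 0xb0 and the right part is
   0x96 >> (8 - 3) == 0x04, giving 0xb4; the DW_OP_neg DW_OP_plus_uconst
   <BITSIZE> pair is what computes BITSIZE minus the rotate count without
   reordering the stack.  */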
15330
15331 static dw_loc_descr_ref
15332 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15333 machine_mode mem_mode)
15334 {
15335 rtx rtlop1 = XEXP (rtl, 1);
15336 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15337 int i;
15338
15339 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15340 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15341 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15342 VAR_INIT_STATUS_INITIALIZED);
15343 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15344 VAR_INIT_STATUS_INITIALIZED);
15345 if (op0 == NULL || op1 == NULL)
15346 return NULL;
15347 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15348 for (i = 0; i < 2; i++)
15349 {
15350 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15351 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15352 mode, mem_mode,
15353 VAR_INIT_STATUS_INITIALIZED);
15354 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15355 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15356 ? DW_OP_const4u
15357 : HOST_BITS_PER_WIDE_INT == 64
15358 ? DW_OP_const8u : DW_OP_constu,
15359 GET_MODE_MASK (mode), 0);
15360 else
15361 mask[i] = NULL;
15362 if (mask[i] == NULL)
15363 return NULL;
15364 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15365 }
15366 ret = op0;
15367 add_loc_descr (&ret, op1);
15368 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15369 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15370 if (GET_CODE (rtl) == ROTATERT)
15371 {
15372 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15373 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15374 GET_MODE_BITSIZE (mode), 0));
15375 }
15376 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15377 if (mask[0] != NULL)
15378 add_loc_descr (&ret, mask[0]);
15379 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15380 if (mask[1] != NULL)
15381 {
15382 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15383 add_loc_descr (&ret, mask[1]);
15384 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15385 }
15386 if (GET_CODE (rtl) == ROTATE)
15387 {
15388 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15389 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15390 GET_MODE_BITSIZE (mode), 0));
15391 }
15392 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15393 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15394 return ret;
15395 }
15396
15397 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15398 for DEBUG_PARAMETER_REF RTL. */
15399
15400 static dw_loc_descr_ref
15401 parameter_ref_descriptor (rtx rtl)
15402 {
15403 dw_loc_descr_ref ret;
15404 dw_die_ref ref;
15405
15406 if (dwarf_strict)
15407 return NULL;
15408 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15409 /* With LTO during LTRANS we get the late DIE that refers to the early
15410 DIE, thus we add another indirection here. This seems to confuse
15411 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15412 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15413 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15414 if (ref)
15415 {
15416 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15417 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15418 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15419 }
15420 else
15421 {
15422 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15423 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15424 }
15425 return ret;
15426 }
15427
15428 /* The following routine converts the RTL for a variable or parameter
15429 (resident in memory) into an equivalent Dwarf representation of a
15430 mechanism for getting the address of that same variable onto the top of a
15431 hypothetical "address evaluation" stack.
15432
15433 When creating memory location descriptors, we are effectively transforming
15434 the RTL for a memory-resident object into its Dwarf postfix expression
15435 equivalent. This routine recursively descends an RTL tree, turning
15436 it into Dwarf postfix code as it goes.
15437
15438 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15439
15440 MEM_MODE is the mode of the memory reference, needed to handle some
15441 autoincrement addressing modes.
15442
15443 Return 0 if we can't represent the location. */
15444
15445 dw_loc_descr_ref
15446 mem_loc_descriptor (rtx rtl, machine_mode mode,
15447 machine_mode mem_mode,
15448 enum var_init_status initialized)
15449 {
15450 dw_loc_descr_ref mem_loc_result = NULL;
15451 enum dwarf_location_atom op;
15452 dw_loc_descr_ref op0, op1;
15453 rtx inner = NULL_RTX;
15454 poly_int64 offset;
15455
15456 if (mode == VOIDmode)
15457 mode = GET_MODE (rtl);
15458
15459 /* Note that for a dynamically sized array, the location we will generate a
15460 description of here will be the lowest numbered location which is
15461 actually within the array. That's *not* necessarily the same as the
15462 zeroth element of the array. */
15463
15464 rtl = targetm.delegitimize_address (rtl);
15465
15466 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15467 return NULL;
15468
15469 scalar_int_mode int_mode = BImode, inner_mode, op1_mode;
15470 switch (GET_CODE (rtl))
15471 {
15472 case POST_INC:
15473 case POST_DEC:
15474 case POST_MODIFY:
15475 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15476
15477 case SUBREG:
15478 /* The case of a subreg may arise when we have a local (register)
15479 variable or a formal (register) parameter which doesn't quite fill
15480 up an entire register. For now, just assume that it is
15481 legitimate to make the Dwarf info refer to the whole register which
15482 contains the given subreg. */
15483 if (!subreg_lowpart_p (rtl))
15484 break;
15485 inner = SUBREG_REG (rtl);
15486 /* FALLTHRU */
15487 case TRUNCATE:
15488 if (inner == NULL_RTX)
15489 inner = XEXP (rtl, 0);
15490 if (is_a <scalar_int_mode> (mode, &int_mode)
15491 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15492 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15493 #ifdef POINTERS_EXTEND_UNSIGNED
15494 || (int_mode == Pmode && mem_mode != VOIDmode)
15495 #endif
15496 )
15497 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15498 {
15499 mem_loc_result = mem_loc_descriptor (inner,
15500 inner_mode,
15501 mem_mode, initialized);
15502 break;
15503 }
15504 if (dwarf_strict && dwarf_version < 5)
15505 break;
15506 if (is_a <scalar_int_mode> (mode, &int_mode)
15507 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15508 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15509 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15510 {
15511 dw_die_ref type_die;
15512 dw_loc_descr_ref cvt;
15513
15514 mem_loc_result = mem_loc_descriptor (inner,
15515 GET_MODE (inner),
15516 mem_mode, initialized);
15517 if (mem_loc_result == NULL)
15518 break;
15519 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15520 if (type_die == NULL)
15521 {
15522 mem_loc_result = NULL;
15523 break;
15524 }
15525 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15526 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15527 else
15528 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15529 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15530 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15531 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15532 add_loc_descr (&mem_loc_result, cvt);
15533 if (is_a <scalar_int_mode> (mode, &int_mode)
15534 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15535 {
15536 /* Convert it to untyped afterwards. */
15537 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15538 add_loc_descr (&mem_loc_result, cvt);
15539 }
15540 }
15541 break;
15542
15543 case REG:
15544 if (!is_a <scalar_int_mode> (mode, &int_mode)
15545 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15546 && rtl != arg_pointer_rtx
15547 && rtl != frame_pointer_rtx
15548 #ifdef POINTERS_EXTEND_UNSIGNED
15549 && (int_mode != Pmode || mem_mode == VOIDmode)
15550 #endif
15551 ))
15552 {
15553 dw_die_ref type_die;
15554 unsigned int dbx_regnum;
15555
15556 if (dwarf_strict && dwarf_version < 5)
15557 break;
15558 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
15559 break;
15560 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15561 if (type_die == NULL)
15562 break;
15563
15564 dbx_regnum = dbx_reg_number (rtl);
15565 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15566 break;
15567 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15568 dbx_regnum, 0);
15569 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15570 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15571 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15572 break;
15573 }
15574 /* Whenever a register number forms a part of the description of the
15575 method for calculating the (dynamic) address of a memory resident
15576 object, DWARF rules require the register number be referred to as
15577 a "base register". This distinction is not based in any way upon
15578 what category of register the hardware believes the given register
15579 belongs to. This is strictly DWARF terminology we're dealing with
15580 here. Note that in cases where the location of a memory-resident
15581 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15582 OP_CONST (0)) the actual DWARF location descriptor that we generate
15583 may just be OP_BASEREG (basereg). This may look deceptively like
15584 the object in question was allocated to a register (rather than in
15585 memory) so DWARF consumers need to be aware of the subtle
15586 distinction between OP_REG and OP_BASEREG. */
15587 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15588 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15589 else if (stack_realign_drap
15590 && crtl->drap_reg
15591 && crtl->args.internal_arg_pointer == rtl
15592 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15593 {
15594 /* If RTL is internal_arg_pointer, which has been optimized
15595 out, use DRAP instead. */
15596 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15597 VAR_INIT_STATUS_INITIALIZED);
15598 }
15599 break;
15600
15601 case SIGN_EXTEND:
15602 case ZERO_EXTEND:
15603 if (!is_a <scalar_int_mode> (mode, &int_mode)
15604 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15605 break;
15606 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15607 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15608 if (op0 == 0)
15609 break;
15610 else if (GET_CODE (rtl) == ZERO_EXTEND
15611 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15612 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15613 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15614 to expand zero extend as two shifts instead of
15615 masking. */
15616 && GET_MODE_SIZE (inner_mode) <= 4)
15617 {
15618 mem_loc_result = op0;
15619 add_loc_descr (&mem_loc_result,
15620 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15621 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15622 }
15623 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15624 {
15625 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15626 shift *= BITS_PER_UNIT;
15627 if (GET_CODE (rtl) == SIGN_EXTEND)
15628 op = DW_OP_shra;
15629 else
15630 op = DW_OP_shr;
15631 mem_loc_result = op0;
15632 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15633 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15634 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15635 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15636 }
15637 else if (!dwarf_strict || dwarf_version >= 5)
15638 {
15639 dw_die_ref type_die1, type_die2;
15640 dw_loc_descr_ref cvt;
15641
15642 type_die1 = base_type_for_mode (inner_mode,
15643 GET_CODE (rtl) == ZERO_EXTEND);
15644 if (type_die1 == NULL)
15645 break;
15646 type_die2 = base_type_for_mode (int_mode, 1);
15647 if (type_die2 == NULL)
15648 break;
15649 mem_loc_result = op0;
15650 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15651 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15652 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15653 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15654 add_loc_descr (&mem_loc_result, cvt);
15655 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15656 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15657 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15658 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15659 add_loc_descr (&mem_loc_result, cvt);
15660 }
15661 break;
15662
15663 case MEM:
15664 {
15665 rtx new_rtl = avoid_constant_pool_reference (rtl);
15666 if (new_rtl != rtl)
15667 {
15668 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15669 initialized);
15670 if (mem_loc_result != NULL)
15671 return mem_loc_result;
15672 }
15673 }
15674 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15675 get_address_mode (rtl), mode,
15676 VAR_INIT_STATUS_INITIALIZED);
15677 if (mem_loc_result == NULL)
15678 mem_loc_result = tls_mem_loc_descriptor (rtl);
15679 if (mem_loc_result != NULL)
15680 {
15681 if (!is_a <scalar_int_mode> (mode, &int_mode)
15682 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15683 {
15684 dw_die_ref type_die;
15685 dw_loc_descr_ref deref;
15686 HOST_WIDE_INT size;
15687
15688 if (dwarf_strict && dwarf_version < 5)
15689 return NULL;
15690 if (!GET_MODE_SIZE (mode).is_constant (&size))
15691 return NULL;
15692 type_die
15693 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15694 if (type_die == NULL)
15695 return NULL;
15696 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15697 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15698 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15699 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15700 add_loc_descr (&mem_loc_result, deref);
15701 }
15702 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15703 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15704 else
15705 add_loc_descr (&mem_loc_result,
15706 new_loc_descr (DW_OP_deref_size,
15707 GET_MODE_SIZE (int_mode), 0));
15708 }
15709 break;
15710
15711 case LO_SUM:
15712 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15713
15714 case LABEL_REF:
15715 /* Some ports can transform a symbol ref into a label ref, because
15716 the symbol ref is too far away and has to be dumped into a constant
15717 pool. */
15718 case CONST:
15719 case SYMBOL_REF:
15720 case UNSPEC:
15721 if (!is_a <scalar_int_mode> (mode, &int_mode)
15722 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15723 #ifdef POINTERS_EXTEND_UNSIGNED
15724 && (int_mode != Pmode || mem_mode == VOIDmode)
15725 #endif
15726 ))
15727 break;
15728
15729 if (GET_CODE (rtl) == UNSPEC)
15730 {
15731 /* If delegitimize_address couldn't do anything with the UNSPEC, we
15732 can't express it in the debug info. This can happen e.g. with some
15733 TLS UNSPECs. Allow UNSPECs formerly from CONST that the backend
15734 approves. */
15735 bool not_ok = false;
15736 subrtx_var_iterator::array_type array;
15737 FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL)
15738 if (*iter != rtl && !CONSTANT_P (*iter))
15739 {
15740 not_ok = true;
15741 break;
15742 }
15743
15744 if (not_ok)
15745 break;
15746
15747 FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL)
15748 if (!const_ok_for_output_1 (*iter))
15749 {
15750 not_ok = true;
15751 break;
15752 }
15753
15754 if (not_ok)
15755 break;
15756
15757 rtl = gen_rtx_CONST (GET_MODE (rtl), rtl);
15758 goto symref;
15759 }
15760
15761 if (GET_CODE (rtl) == SYMBOL_REF
15762 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15763 {
15764 dw_loc_descr_ref temp;
15765
15766 /* If this is not defined, we have no way to emit the data. */
15767 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15768 break;
15769
15770 temp = new_addr_loc_descr (rtl, dtprel_true);
15771
15772 /* We check for DWARF 5 here because gdb did not implement
15773 DW_OP_form_tls_address until after 7.12. */
15774 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15775 ? DW_OP_form_tls_address
15776 : DW_OP_GNU_push_tls_address),
15777 0, 0);
15778 add_loc_descr (&mem_loc_result, temp);
15779
15780 break;
15781 }
15782
15783 if (!const_ok_for_output (rtl))
15784 {
15785 if (GET_CODE (rtl) == CONST)
15786 switch (GET_CODE (XEXP (rtl, 0)))
15787 {
15788 case NOT:
15789 op = DW_OP_not;
15790 goto try_const_unop;
15791 case NEG:
15792 op = DW_OP_neg;
15793 goto try_const_unop;
15794 try_const_unop:
15795 rtx arg;
15796 arg = XEXP (XEXP (rtl, 0), 0);
15797 if (!CONSTANT_P (arg))
15798 arg = gen_rtx_CONST (int_mode, arg);
15799 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15800 initialized);
15801 if (op0)
15802 {
15803 mem_loc_result = op0;
15804 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15805 }
15806 break;
15807 default:
15808 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15809 mem_mode, initialized);
15810 break;
15811 }
15812 break;
15813 }
15814
15815 symref:
15816 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15817 vec_safe_push (used_rtx_array, rtl);
15818 break;
15819
15820 case CONCAT:
15821 case CONCATN:
15822 case VAR_LOCATION:
15823 case DEBUG_IMPLICIT_PTR:
15824 expansion_failed (NULL_TREE, rtl,
15825 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15826 return 0;
15827
15828 case ENTRY_VALUE:
15829 if (dwarf_strict && dwarf_version < 5)
15830 return NULL;
15831 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15832 {
15833 if (!is_a <scalar_int_mode> (mode, &int_mode)
15834 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15835 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15836 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15837 else
15838 {
15839 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15840 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15841 return NULL;
15842 op0 = one_reg_loc_descriptor (dbx_regnum,
15843 VAR_INIT_STATUS_INITIALIZED);
15844 }
15845 }
15846 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15847 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15848 {
15849 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15850 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15851 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15852 return NULL;
15853 }
15854 else
15855 gcc_unreachable ();
15856 if (op0 == NULL)
15857 return NULL;
15858 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15859 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15860 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15861 break;
15862
15863 case DEBUG_PARAMETER_REF:
15864 mem_loc_result = parameter_ref_descriptor (rtl);
15865 break;
15866
15867 case PRE_MODIFY:
15868 /* Extract the PLUS expression nested inside and fall into
15869 PLUS code below. */
15870 rtl = XEXP (rtl, 1);
15871 goto plus;
15872
15873 case PRE_INC:
15874 case PRE_DEC:
15875 /* Turn these into a PLUS expression and fall into the PLUS code
15876 below. */
15877 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15878 gen_int_mode (GET_CODE (rtl) == PRE_INC
15879 ? GET_MODE_UNIT_SIZE (mem_mode)
15880 : -GET_MODE_UNIT_SIZE (mem_mode),
15881 mode));
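/* For illustration only, not from the original sources: with
   mem_mode == SImode, a (pre_inc (reg R)) address is rewritten here as
   (plus (reg R) (const_int 4)) and a (pre_dec (reg R)) as
   (plus (reg R) (const_int -4)) before falling through to the PLUS
   handling below.  */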
15882
15883 /* fall through */
15884
15885 case PLUS:
15886 plus:
15887 if (is_based_loc (rtl)
15888 && is_a <scalar_int_mode> (mode, &int_mode)
15889 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15890 || XEXP (rtl, 0) == arg_pointer_rtx
15891 || XEXP (rtl, 0) == frame_pointer_rtx))
15892 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15893 INTVAL (XEXP (rtl, 1)),
15894 VAR_INIT_STATUS_INITIALIZED);
15895 else
15896 {
15897 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15898 VAR_INIT_STATUS_INITIALIZED);
15899 if (mem_loc_result == 0)
15900 break;
15901
15902 if (CONST_INT_P (XEXP (rtl, 1))
15903 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15904 <= DWARF2_ADDR_SIZE))
15905 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15906 else
15907 {
15908 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15909 VAR_INIT_STATUS_INITIALIZED);
15910 if (op1 == 0)
15911 return NULL;
15912 add_loc_descr (&mem_loc_result, op1);
15913 add_loc_descr (&mem_loc_result,
15914 new_loc_descr (DW_OP_plus, 0, 0));
15915 }
15916 }
15917 break;
15918
15919 /* If a pseudo-reg is optimized away, it is possible for it to
15920 be replaced with a MEM containing a multiply or shift. */
15921 case MINUS:
15922 op = DW_OP_minus;
15923 goto do_binop;
15924
15925 case MULT:
15926 op = DW_OP_mul;
15927 goto do_binop;
15928
15929 case DIV:
15930 if ((!dwarf_strict || dwarf_version >= 5)
15931 && is_a <scalar_int_mode> (mode, &int_mode)
15932 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15933 {
15934 mem_loc_result = typed_binop (DW_OP_div, rtl,
15935 base_type_for_mode (mode, 0),
15936 int_mode, mem_mode);
15937 break;
15938 }
15939 op = DW_OP_div;
15940 goto do_binop;
15941
15942 case UMOD:
15943 op = DW_OP_mod;
15944 goto do_binop;
15945
15946 case ASHIFT:
15947 op = DW_OP_shl;
15948 goto do_shift;
15949
15950 case ASHIFTRT:
15951 op = DW_OP_shra;
15952 goto do_shift;
15953
15954 case LSHIFTRT:
15955 op = DW_OP_shr;
15956 goto do_shift;
15957
15958 do_shift:
15959 if (!is_a <scalar_int_mode> (mode, &int_mode))
15960 break;
15961 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15962 VAR_INIT_STATUS_INITIALIZED);
15963 {
15964 rtx rtlop1 = XEXP (rtl, 1);
15965 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15966 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15967 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15968 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15969 VAR_INIT_STATUS_INITIALIZED);
15970 }
15971
15972 if (op0 == 0 || op1 == 0)
15973 break;
15974
15975 mem_loc_result = op0;
15976 add_loc_descr (&mem_loc_result, op1);
15977 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15978 break;
15979
15980 case AND:
15981 op = DW_OP_and;
15982 goto do_binop;
15983
15984 case IOR:
15985 op = DW_OP_or;
15986 goto do_binop;
15987
15988 case XOR:
15989 op = DW_OP_xor;
15990 goto do_binop;
15991
15992 do_binop:
15993 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15994 VAR_INIT_STATUS_INITIALIZED);
15995 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15996 VAR_INIT_STATUS_INITIALIZED);
15997
15998 if (op0 == 0 || op1 == 0)
15999 break;
16000
16001 mem_loc_result = op0;
16002 add_loc_descr (&mem_loc_result, op1);
16003 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16004 break;
16005
16006 case MOD:
16007 if ((!dwarf_strict || dwarf_version >= 5)
16008 && is_a <scalar_int_mode> (mode, &int_mode)
16009 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
16010 {
16011 mem_loc_result = typed_binop (DW_OP_mod, rtl,
16012 base_type_for_mode (mode, 0),
16013 int_mode, mem_mode);
16014 break;
16015 }
16016
16017 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
16018 VAR_INIT_STATUS_INITIALIZED);
16019 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16020 VAR_INIT_STATUS_INITIALIZED);
16021
16022 if (op0 == 0 || op1 == 0)
16023 break;
16024
16025 mem_loc_result = op0;
16026 add_loc_descr (&mem_loc_result, op1);
16027 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
16028 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
16029 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
16030 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
16031 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
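/* Illustrative stack trace, not from the original sources: with the two
   operands A and B pushed by op0 and op1, the sequence above computes the
   remainder A - (A / B) * B:
     A B  --over-->  A B A  --over-->  A B A B  --div-->  A B (A/B)
          --mul-->   A (B*(A/B))  --minus-->  A - (B*(A/B))  */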
16032 break;
16033
16034 case UDIV:
16035 if ((!dwarf_strict || dwarf_version >= 5)
16036 && is_a <scalar_int_mode> (mode, &int_mode))
16037 {
16038 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
16039 {
16040 op = DW_OP_div;
16041 goto do_binop;
16042 }
16043 mem_loc_result = typed_binop (DW_OP_div, rtl,
16044 base_type_for_mode (int_mode, 1),
16045 int_mode, mem_mode);
16046 }
16047 break;
16048
16049 case NOT:
16050 op = DW_OP_not;
16051 goto do_unop;
16052
16053 case ABS:
16054 op = DW_OP_abs;
16055 goto do_unop;
16056
16057 case NEG:
16058 op = DW_OP_neg;
16059 goto do_unop;
16060
16061 do_unop:
16062 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
16063 VAR_INIT_STATUS_INITIALIZED);
16064
16065 if (op0 == 0)
16066 break;
16067
16068 mem_loc_result = op0;
16069 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16070 break;
16071
16072 case CONST_INT:
16073 if (!is_a <scalar_int_mode> (mode, &int_mode)
16074 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16075 #ifdef POINTERS_EXTEND_UNSIGNED
16076 || (int_mode == Pmode
16077 && mem_mode != VOIDmode
16078 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
16079 #endif
16080 )
16081 {
16082 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
16083 break;
16084 }
16085 if ((!dwarf_strict || dwarf_version >= 5)
16086 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
16087 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
16088 {
16089 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
16090 scalar_int_mode amode;
16091 if (type_die == NULL)
16092 return NULL;
16093 if (INTVAL (rtl) >= 0
16094 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
16095 .exists (&amode))
16096 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
16097 /* const DW_OP_convert <XXX> vs.
16098 DW_OP_const_type <XXX, 1, const>. */
16099 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
16100 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
16101 {
16102 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
16103 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16104 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16105 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16106 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
16107 add_loc_descr (&mem_loc_result, op0);
16108 return mem_loc_result;
16109 }
16110 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
16111 INTVAL (rtl));
16112 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16113 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16114 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16115 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
16116 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
16117 else
16118 {
16119 mem_loc_result->dw_loc_oprnd2.val_class
16120 = dw_val_class_const_double;
16121 mem_loc_result->dw_loc_oprnd2.v.val_double
16122 = double_int::from_shwi (INTVAL (rtl));
16123 }
16124 }
16125 break;
16126
16127 case CONST_DOUBLE:
16128 if (!dwarf_strict || dwarf_version >= 5)
16129 {
16130 dw_die_ref type_die;
16131
16132 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
16133 CONST_DOUBLE rtx could represent either a large integer
16134 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
16135 the value is always a floating point constant.
16136
16137 When it is an integer, a CONST_DOUBLE is used whenever
16138 the constant requires 2 HWIs to be adequately represented.
16139 We output CONST_DOUBLEs as blocks. */
16140 if (mode == VOIDmode
16141 || (GET_MODE (rtl) == VOIDmode
16142 && maybe_ne (GET_MODE_BITSIZE (mode),
16143 HOST_BITS_PER_DOUBLE_INT)))
16144 break;
16145 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16146 if (type_die == NULL)
16147 return NULL;
16148 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16149 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16150 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16151 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16152 #if TARGET_SUPPORTS_WIDE_INT == 0
16153 if (!SCALAR_FLOAT_MODE_P (mode))
16154 {
16155 mem_loc_result->dw_loc_oprnd2.val_class
16156 = dw_val_class_const_double;
16157 mem_loc_result->dw_loc_oprnd2.v.val_double
16158 = rtx_to_double_int (rtl);
16159 }
16160 else
16161 #endif
16162 {
16163 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16164 unsigned int length = GET_MODE_SIZE (float_mode);
16165 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16166
16167 insert_float (rtl, array);
16168 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16169 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16170 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16171 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16172 }
16173 }
16174 break;
16175
16176 case CONST_WIDE_INT:
16177 if (!dwarf_strict || dwarf_version >= 5)
16178 {
16179 dw_die_ref type_die;
16180
16181 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16182 if (type_die == NULL)
16183 return NULL;
16184 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16185 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16186 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16187 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16188 mem_loc_result->dw_loc_oprnd2.val_class
16189 = dw_val_class_wide_int;
16190 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16191 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16192 }
16193 break;
16194
16195 case CONST_POLY_INT:
16196 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16197 break;
16198
16199 case EQ:
16200 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16201 break;
16202
16203 case GE:
16204 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16205 break;
16206
16207 case GT:
16208 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16209 break;
16210
16211 case LE:
16212 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16213 break;
16214
16215 case LT:
16216 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16217 break;
16218
16219 case NE:
16220 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16221 break;
16222
16223 case GEU:
16224 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16225 break;
16226
16227 case GTU:
16228 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16229 break;
16230
16231 case LEU:
16232 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16233 break;
16234
16235 case LTU:
16236 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16237 break;
16238
16239 case UMIN:
16240 case UMAX:
16241 if (!SCALAR_INT_MODE_P (mode))
16242 break;
16243 /* FALLTHRU */
16244 case SMIN:
16245 case SMAX:
16246 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16247 break;
16248
16249 case ZERO_EXTRACT:
16250 case SIGN_EXTRACT:
16251 if (CONST_INT_P (XEXP (rtl, 1))
16252 && CONST_INT_P (XEXP (rtl, 2))
16253 && is_a <scalar_int_mode> (mode, &int_mode)
16254 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16255 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16256 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16257 && ((unsigned) INTVAL (XEXP (rtl, 1))
16258 + (unsigned) INTVAL (XEXP (rtl, 2))
16259 <= GET_MODE_BITSIZE (int_mode)))
16260 {
16261 int shift, size;
16262 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16263 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16264 if (op0 == 0)
16265 break;
16266 if (GET_CODE (rtl) == SIGN_EXTRACT)
16267 op = DW_OP_shra;
16268 else
16269 op = DW_OP_shr;
16270 mem_loc_result = op0;
16271 size = INTVAL (XEXP (rtl, 1));
16272 shift = INTVAL (XEXP (rtl, 2));
16273 if (BITS_BIG_ENDIAN)
16274 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
16275 if (shift + size != (int) DWARF2_ADDR_SIZE)
16276 {
16277 add_loc_descr (&mem_loc_result,
16278 int_loc_descriptor (DWARF2_ADDR_SIZE
16279 - shift - size));
16280 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16281 }
16282 if (size != (int) DWARF2_ADDR_SIZE)
16283 {
16284 add_loc_descr (&mem_loc_result,
16285 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
16286 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16287 }
16288 }
16289 break;
16290
16291 case IF_THEN_ELSE:
16292 {
16293 dw_loc_descr_ref op2, bra_node, drop_node;
16294 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16295 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16296 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16297 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16298 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16299 VAR_INIT_STATUS_INITIALIZED);
16300 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16301 VAR_INIT_STATUS_INITIALIZED);
16302 if (op0 == NULL || op1 == NULL || op2 == NULL)
16303 break;
16304
16305 mem_loc_result = op1;
16306 add_loc_descr (&mem_loc_result, op2);
16307 add_loc_descr (&mem_loc_result, op0);
16308 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16309 add_loc_descr (&mem_loc_result, bra_node);
16310 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16311 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16312 add_loc_descr (&mem_loc_result, drop_node);
16313 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16314 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
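/* Illustrative control flow, not from the original sources: the stack
   holds <then-value> <else-value> <condition>; DW_OP_bra pops the
   condition and, if it is nonzero, jumps straight to DW_OP_drop, which
   discards the else-value and leaves the then-value.  If the condition is
   zero, DW_OP_swap followed by DW_OP_drop discards the then-value and
   leaves the else-value instead.  */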
16315 }
16316 break;
16317
16318 case FLOAT_EXTEND:
16319 case FLOAT_TRUNCATE:
16320 case FLOAT:
16321 case UNSIGNED_FLOAT:
16322 case FIX:
16323 case UNSIGNED_FIX:
16324 if (!dwarf_strict || dwarf_version >= 5)
16325 {
16326 dw_die_ref type_die;
16327 dw_loc_descr_ref cvt;
16328
16329 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16330 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16331 if (op0 == NULL)
16332 break;
16333 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16334 && (GET_CODE (rtl) == FLOAT
16335 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16336 {
16337 type_die = base_type_for_mode (int_mode,
16338 GET_CODE (rtl) == UNSIGNED_FLOAT);
16339 if (type_die == NULL)
16340 break;
16341 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16342 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16343 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16344 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16345 add_loc_descr (&op0, cvt);
16346 }
16347 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16348 if (type_die == NULL)
16349 break;
16350 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16351 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16352 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16353 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16354 add_loc_descr (&op0, cvt);
16355 if (is_a <scalar_int_mode> (mode, &int_mode)
16356 && (GET_CODE (rtl) == FIX
16357 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16358 {
16359 op0 = convert_descriptor_to_mode (int_mode, op0);
16360 if (op0 == NULL)
16361 break;
16362 }
16363 mem_loc_result = op0;
16364 }
16365 break;
16366
16367 case CLZ:
16368 case CTZ:
16369 case FFS:
16370 if (is_a <scalar_int_mode> (mode, &int_mode))
16371 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16372 break;
16373
16374 case POPCOUNT:
16375 case PARITY:
16376 if (is_a <scalar_int_mode> (mode, &int_mode))
16377 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16378 break;
16379
16380 case BSWAP:
16381 if (is_a <scalar_int_mode> (mode, &int_mode))
16382 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16383 break;
16384
16385 case ROTATE:
16386 case ROTATERT:
16387 if (is_a <scalar_int_mode> (mode, &int_mode))
16388 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16389 break;
16390
16391 case COMPARE:
16392 /* In theory, we could implement the above. */
16393 /* DWARF cannot represent the unsigned compare operations
16394 natively. */
16395 case SS_MULT:
16396 case US_MULT:
16397 case SS_DIV:
16398 case US_DIV:
16399 case SS_PLUS:
16400 case US_PLUS:
16401 case SS_MINUS:
16402 case US_MINUS:
16403 case SS_NEG:
16404 case US_NEG:
16405 case SS_ABS:
16406 case SS_ASHIFT:
16407 case US_ASHIFT:
16408 case SS_TRUNCATE:
16409 case US_TRUNCATE:
16410 case UNORDERED:
16411 case ORDERED:
16412 case UNEQ:
16413 case UNGE:
16414 case UNGT:
16415 case UNLE:
16416 case UNLT:
16417 case LTGT:
16418 case FRACT_CONVERT:
16419 case UNSIGNED_FRACT_CONVERT:
16420 case SAT_FRACT:
16421 case UNSIGNED_SAT_FRACT:
16422 case SQRT:
16423 case ASM_OPERANDS:
16424 case VEC_MERGE:
16425 case VEC_SELECT:
16426 case VEC_CONCAT:
16427 case VEC_DUPLICATE:
16428 case VEC_SERIES:
16429 case HIGH:
16430 case FMA:
16431 case STRICT_LOW_PART:
16432 case CONST_VECTOR:
16433 case CONST_FIXED:
16434 case CLRSB:
16435 case CLOBBER:
16436 break;
16437
16438 case CONST_STRING:
16439 resolve_one_addr (&rtl);
16440 goto symref;
16441
16442 /* RTL sequences inside PARALLEL record a series of DWARF operations for
16443 the expression. An UNSPEC rtx represents a raw DWARF operation,
16444 new_loc_descr is called for it to build the operation directly.
16445 Otherwise mem_loc_descriptor is called recursively. */
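/* Hypothetical illustration, not taken from any real target: an element
   such as (unspec [(const_int -16) (const_int 0)] N), where the unspec
   number N is the numeric value of DW_OP_breg6, is turned by the code
   below into new_loc_descr (DW_OP_breg6, -16, 0); any other element is
   simply translated by a recursive mem_loc_descriptor call.  */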
16446 case PARALLEL:
16447 {
16448 int index = 0;
16449 dw_loc_descr_ref exp_result = NULL;
16450
16451 for (; index < XVECLEN (rtl, 0); index++)
16452 {
16453 rtx elem = XVECEXP (rtl, 0, index);
16454 if (GET_CODE (elem) == UNSPEC)
16455 {
16456 /* Each DWARF operation UNSPEC contains two operands; if
16457 one operand is not used for the operation, const0_rtx is
16458 passed. */
16459 gcc_assert (XVECLEN (elem, 0) == 2);
16460
16461 HOST_WIDE_INT dw_op = XINT (elem, 1);
16462 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16463 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16464 exp_result
16465 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16466 oprnd2);
16467 }
16468 else
16469 exp_result
16470 = mem_loc_descriptor (elem, mode, mem_mode,
16471 VAR_INIT_STATUS_INITIALIZED);
16472
16473 if (!mem_loc_result)
16474 mem_loc_result = exp_result;
16475 else
16476 add_loc_descr (&mem_loc_result, exp_result);
16477 }
16478
16479 break;
16480 }
16481
16482 default:
16483 if (flag_checking)
16484 {
16485 print_rtl (stderr, rtl);
16486 gcc_unreachable ();
16487 }
16488 break;
16489 }
16490
16491 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16492 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16493
16494 return mem_loc_result;
16495 }
16496
16497 /* Return a descriptor that describes the concatenation of two locations.
16498 This is typically a complex variable. */
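/* For illustration only, not from the original sources: for a
   _Complex double split into two DFmode registers X0 and X1, the result
   is roughly
     <descr of X0> DW_OP_piece 8 <descr of X1> DW_OP_piece 8
   i.e. each half is described separately and tagged with its byte size.  */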
16499
16500 static dw_loc_descr_ref
16501 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16502 {
16503 /* At present we only track constant-sized pieces. */
16504 unsigned int size0, size1;
16505 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16506 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16507 return 0;
16508
16509 dw_loc_descr_ref cc_loc_result = NULL;
16510 dw_loc_descr_ref x0_ref
16511 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16512 dw_loc_descr_ref x1_ref
16513 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16514
16515 if (x0_ref == 0 || x1_ref == 0)
16516 return 0;
16517
16518 cc_loc_result = x0_ref;
16519 add_loc_descr_op_piece (&cc_loc_result, size0);
16520
16521 add_loc_descr (&cc_loc_result, x1_ref);
16522 add_loc_descr_op_piece (&cc_loc_result, size1);
16523
16524 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16525 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16526
16527 return cc_loc_result;
16528 }
16529
16530 /* Return a descriptor that describes the concatenation of N
16531 locations. */
16532
16533 static dw_loc_descr_ref
16534 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16535 {
16536 unsigned int i;
16537 dw_loc_descr_ref cc_loc_result = NULL;
16538 unsigned int n = XVECLEN (concatn, 0);
16539 unsigned int size;
16540
16541 for (i = 0; i < n; ++i)
16542 {
16543 dw_loc_descr_ref ref;
16544 rtx x = XVECEXP (concatn, 0, i);
16545
16546 /* At present we only track constant-sized pieces. */
16547 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16548 return NULL;
16549
16550 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16551 if (ref == NULL)
16552 return NULL;
16553
16554 add_loc_descr (&cc_loc_result, ref);
16555 add_loc_descr_op_piece (&cc_loc_result, size);
16556 }
16557
16558 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16559 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16560
16561 return cc_loc_result;
16562 }
16563
16564 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16565 for DEBUG_IMPLICIT_PTR RTL. */
16566
16567 static dw_loc_descr_ref
16568 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16569 {
16570 dw_loc_descr_ref ret;
16571 dw_die_ref ref;
16572
16573 if (dwarf_strict && dwarf_version < 5)
16574 return NULL;
16575 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16576 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16577 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16578 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16579 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16580 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16581 if (ref)
16582 {
16583 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16584 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16585 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16586 }
16587 else
16588 {
16589 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16590 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16591 }
16592 return ret;
16593 }
16594
16595 /* Output a proper Dwarf location descriptor for a variable or parameter
16596 which is either allocated in a register or in a memory location. For a
16597 register, we just generate an OP_REG and the register number. For a
16598 memory location we provide a Dwarf postfix expression describing how to
16599 generate the (dynamic) address of the object onto the address stack.
16600
16601 MODE is mode of the decl if this loc_descriptor is going to be used in
16602 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16603 allowed, VOIDmode otherwise.
16604
16605 If we don't know how to describe it, return 0. */
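/* Rough examples, not from the original sources and with target-dependent
   register numbering assumed: a variable living in DWARF register 3 becomes
   a single DW_OP_reg3, whereas a variable living in memory at the frame
   base minus 24 becomes the postfix expression DW_OP_fbreg -24, which
   computes its address at run time.  */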
16606
16607 static dw_loc_descr_ref
16608 loc_descriptor (rtx rtl, machine_mode mode,
16609 enum var_init_status initialized)
16610 {
16611 dw_loc_descr_ref loc_result = NULL;
16612 scalar_int_mode int_mode;
16613
16614 switch (GET_CODE (rtl))
16615 {
16616 case SUBREG:
16617 /* The case of a subreg may arise when we have a local (register)
16618 variable or a formal (register) parameter which doesn't quite fill
16619 up an entire register. For now, just assume that it is
16620 legitimate to make the Dwarf info refer to the whole register which
16621 contains the given subreg. */
16622 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16623 loc_result = loc_descriptor (SUBREG_REG (rtl),
16624 GET_MODE (SUBREG_REG (rtl)), initialized);
16625 else
16626 goto do_default;
16627 break;
16628
16629 case REG:
16630 loc_result = reg_loc_descriptor (rtl, initialized);
16631 break;
16632
16633 case MEM:
16634 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16635 GET_MODE (rtl), initialized);
16636 if (loc_result == NULL)
16637 loc_result = tls_mem_loc_descriptor (rtl);
16638 if (loc_result == NULL)
16639 {
16640 rtx new_rtl = avoid_constant_pool_reference (rtl);
16641 if (new_rtl != rtl)
16642 loc_result = loc_descriptor (new_rtl, mode, initialized);
16643 }
16644 break;
16645
16646 case CONCAT:
16647 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16648 initialized);
16649 break;
16650
16651 case CONCATN:
16652 loc_result = concatn_loc_descriptor (rtl, initialized);
16653 break;
16654
16655 case VAR_LOCATION:
16656 /* Single part. */
16657 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16658 {
16659 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16660 if (GET_CODE (loc) == EXPR_LIST)
16661 loc = XEXP (loc, 0);
16662 loc_result = loc_descriptor (loc, mode, initialized);
16663 break;
16664 }
16665
16666 rtl = XEXP (rtl, 1);
16667 /* FALLTHRU */
16668
16669 case PARALLEL:
16670 {
16671 rtvec par_elems = XVEC (rtl, 0);
16672 int num_elem = GET_NUM_ELEM (par_elems);
16673 machine_mode mode;
16674 int i, size;
16675
16676 /* Create the first one, so we have something to add to. */
16677 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16678 VOIDmode, initialized);
16679 if (loc_result == NULL)
16680 return NULL;
16681 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16682 /* At present we only track constant-sized pieces. */
16683 if (!GET_MODE_SIZE (mode).is_constant (&size))
16684 return NULL;
16685 add_loc_descr_op_piece (&loc_result, size);
16686 for (i = 1; i < num_elem; i++)
16687 {
16688 dw_loc_descr_ref temp;
16689
16690 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16691 VOIDmode, initialized);
16692 if (temp == NULL)
16693 return NULL;
16694 add_loc_descr (&loc_result, temp);
16695 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16696 /* At present we only track constant-sized pieces. */
16697 if (!GET_MODE_SIZE (mode).is_constant (&size))
16698 return NULL;
16699 add_loc_descr_op_piece (&loc_result, size);
16700 }
16701 }
16702 break;
16703
16704 case CONST_INT:
16705 if (mode != VOIDmode && mode != BLKmode)
16706 {
16707 int_mode = as_a <scalar_int_mode> (mode);
16708 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16709 INTVAL (rtl));
16710 }
16711 break;
16712
16713 case CONST_DOUBLE:
16714 if (mode == VOIDmode)
16715 mode = GET_MODE (rtl);
16716
16717 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16718 {
16719 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16720
16721 /* Note that a CONST_DOUBLE rtx could represent either an integer
16722 or a floating-point constant. A CONST_DOUBLE is used whenever
16723 the constant requires more than one word in order to be
16724 adequately represented. We output CONST_DOUBLEs as blocks. */
16725 scalar_mode smode = as_a <scalar_mode> (mode);
16726 loc_result = new_loc_descr (DW_OP_implicit_value,
16727 GET_MODE_SIZE (smode), 0);
16728 #if TARGET_SUPPORTS_WIDE_INT == 0
16729 if (!SCALAR_FLOAT_MODE_P (smode))
16730 {
16731 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16732 loc_result->dw_loc_oprnd2.v.val_double
16733 = rtx_to_double_int (rtl);
16734 }
16735 else
16736 #endif
16737 {
16738 unsigned int length = GET_MODE_SIZE (smode);
16739 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16740
16741 insert_float (rtl, array);
16742 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16743 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16744 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16745 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16746 }
16747 }
16748 break;
16749
16750 case CONST_WIDE_INT:
16751 if (mode == VOIDmode)
16752 mode = GET_MODE (rtl);
16753
16754 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16755 {
16756 int_mode = as_a <scalar_int_mode> (mode);
16757 loc_result = new_loc_descr (DW_OP_implicit_value,
16758 GET_MODE_SIZE (int_mode), 0);
16759 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16760 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16761 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16762 }
16763 break;
16764
16765 case CONST_VECTOR:
16766 if (mode == VOIDmode)
16767 mode = GET_MODE (rtl);
16768
16769 if (mode != VOIDmode
16770 /* The combination of a length and byte elt_size doesn't extend
16771 naturally to boolean vectors, where several elements are packed
16772 into the same byte. */
16773 && GET_MODE_CLASS (mode) != MODE_VECTOR_BOOL
16774 && (dwarf_version >= 4 || !dwarf_strict))
16775 {
16776 unsigned int length;
16777 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16778 return NULL;
16779
16780 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16781 unsigned char *array
16782 = ggc_vec_alloc<unsigned char> (length * elt_size);
16783 unsigned int i;
16784 unsigned char *p;
16785 machine_mode imode = GET_MODE_INNER (mode);
16786
16787 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16788 switch (GET_MODE_CLASS (mode))
16789 {
16790 case MODE_VECTOR_INT:
16791 for (i = 0, p = array; i < length; i++, p += elt_size)
16792 {
16793 rtx elt = CONST_VECTOR_ELT (rtl, i);
16794 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16795 }
16796 break;
16797
16798 case MODE_VECTOR_FLOAT:
16799 for (i = 0, p = array; i < length; i++, p += elt_size)
16800 {
16801 rtx elt = CONST_VECTOR_ELT (rtl, i);
16802 insert_float (elt, p);
16803 }
16804 break;
16805
16806 default:
16807 gcc_unreachable ();
16808 }
16809
16810 loc_result = new_loc_descr (DW_OP_implicit_value,
16811 length * elt_size, 0);
16812 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16813 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16814 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16815 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16816 }
16817 break;
16818
16819 case CONST:
16820 if (mode == VOIDmode
16821 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16822 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16823 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16824 {
16825 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16826 break;
16827 }
16828 /* FALLTHROUGH */
16829 case SYMBOL_REF:
16830 if (!const_ok_for_output (rtl))
16831 break;
16832 /* FALLTHROUGH */
16833 case LABEL_REF:
16834 if (is_a <scalar_int_mode> (mode, &int_mode)
16835 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16836 && (dwarf_version >= 4 || !dwarf_strict))
16837 {
16838 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16839 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16840 vec_safe_push (used_rtx_array, rtl);
16841 }
16842 break;
16843
16844 case DEBUG_IMPLICIT_PTR:
16845 loc_result = implicit_ptr_descriptor (rtl, 0);
16846 break;
16847
16848 case PLUS:
16849 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16850 && CONST_INT_P (XEXP (rtl, 1)))
16851 {
16852 loc_result
16853 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16854 break;
16855 }
16856 /* FALLTHRU */
16857 do_default:
16858 default:
16859 if ((is_a <scalar_int_mode> (mode, &int_mode)
16860 && GET_MODE (rtl) == int_mode
16861 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16862 && dwarf_version >= 4)
16863 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16864 {
16865 /* Value expression. */
16866 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16867 if (loc_result)
16868 add_loc_descr (&loc_result,
16869 new_loc_descr (DW_OP_stack_value, 0, 0));
16870 }
16871 break;
16872 }
16873
16874 return loc_result;
16875 }
16876
16877 /* We need to figure out what section we should use as the base for the
16878 address ranges where a given location is valid.
16879 1. If this particular DECL has a section associated with it, use that.
16880 2. If this function has a section associated with it, use that.
16881 3. Otherwise, use the text section.
16882 XXX: If you split a variable across multiple sections, we won't notice. */
16883
16884 static const char *
16885 secname_for_decl (const_tree decl)
16886 {
16887 const char *secname;
16888
16889 if (VAR_OR_FUNCTION_DECL_P (decl)
16890 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16891 && DECL_SECTION_NAME (decl))
16892 secname = DECL_SECTION_NAME (decl);
16893 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16894 {
16895 if (in_cold_section_p)
16896 {
16897 section *sec = current_function_section ();
16898 if (sec->common.flags & SECTION_NAMED)
16899 return sec->named.name;
16900 }
16901 secname = DECL_SECTION_NAME (current_function_decl);
16902 }
16903 else if (cfun && in_cold_section_p)
16904 secname = crtl->subsections.cold_section_label;
16905 else
16906 secname = text_section_label;
16907
16908 return secname;
16909 }
16910
16911 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16912
16913 static bool
16914 decl_by_reference_p (tree decl)
16915 {
16916 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16917 || VAR_P (decl))
16918 && DECL_BY_REFERENCE (decl));
16919 }
16920
16921 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16922 for VARLOC. */
16923
16924 static dw_loc_descr_ref
16925 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16926 enum var_init_status initialized)
16927 {
16928 int have_address = 0;
16929 dw_loc_descr_ref descr;
16930 machine_mode mode;
16931
16932 if (want_address != 2)
16933 {
16934 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16935 /* Single part. */
16936 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16937 {
16938 varloc = PAT_VAR_LOCATION_LOC (varloc);
16939 if (GET_CODE (varloc) == EXPR_LIST)
16940 varloc = XEXP (varloc, 0);
16941 mode = GET_MODE (varloc);
16942 if (MEM_P (varloc))
16943 {
16944 rtx addr = XEXP (varloc, 0);
16945 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16946 mode, initialized);
16947 if (descr)
16948 have_address = 1;
16949 else
16950 {
16951 rtx x = avoid_constant_pool_reference (varloc);
16952 if (x != varloc)
16953 descr = mem_loc_descriptor (x, mode, VOIDmode,
16954 initialized);
16955 }
16956 }
16957 else
16958 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16959 }
16960 else
16961 return 0;
16962 }
16963 else
16964 {
16965 if (GET_CODE (varloc) == VAR_LOCATION)
16966 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16967 else
16968 mode = DECL_MODE (loc);
16969 descr = loc_descriptor (varloc, mode, initialized);
16970 have_address = 1;
16971 }
16972
16973 if (!descr)
16974 return 0;
16975
16976 if (want_address == 2 && !have_address
16977 && (dwarf_version >= 4 || !dwarf_strict))
16978 {
16979 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16980 {
16981 expansion_failed (loc, NULL_RTX,
16982 "DWARF address size mismatch");
16983 return 0;
16984 }
16985 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16986 have_address = 1;
16987 }
16988 /* Show if we can't fill the request for an address. */
16989 if (want_address && !have_address)
16990 {
16991 expansion_failed (loc, NULL_RTX,
16992 "Want address and only have value");
16993 return 0;
16994 }
16995
16996 /* If we've got an address and don't want one, dereference. */
16997 if (!want_address && have_address)
16998 {
16999 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
17000 enum dwarf_location_atom op;
17001
17002 if (size > DWARF2_ADDR_SIZE || size == -1)
17003 {
17004 expansion_failed (loc, NULL_RTX,
17005 "DWARF address size mismatch");
17006 return 0;
17007 }
17008 else if (size == DWARF2_ADDR_SIZE)
17009 op = DW_OP_deref;
17010 else
17011 op = DW_OP_deref_size;
17012
17013 add_loc_descr (&descr, new_loc_descr (op, size, 0));
17014 }
17015
17016 return descr;
17017 }
17018
17019 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
17020 if it is not possible. */
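/* Worked examples, illustrative only: with BITS_PER_UNIT == 8, a request
   for bitsize 32 at offset 0 yields DW_OP_piece 4, while bitsize 12 at
   offset 4 yields DW_OP_bit_piece 12 4; the latter is returned only for
   DWARF 3 or later, or when -gstrict-dwarf is not in effect.  */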
17021
17022 static dw_loc_descr_ref
17023 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
17024 {
17025 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
17026 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
17027 else if (dwarf_version >= 3 || !dwarf_strict)
17028 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
17029 else
17030 return NULL;
17031 }
17032
17033 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
17034 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
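/* Illustrative sketch, not from the original sources: for a 16-byte struct
   whose first 8 bytes were scalarized into a register and whose remaining
   8 bytes were optimized away, the code below would produce roughly
     <descr of the register> DW_OP_piece 8 DW_OP_piece 8
   where the trailing DW_OP_piece with no preceding location marks the
   optimized-out hole.  */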
17035
17036 static dw_loc_descr_ref
17037 dw_sra_loc_expr (tree decl, rtx loc)
17038 {
17039 rtx p;
17040 unsigned HOST_WIDE_INT padsize = 0;
17041 dw_loc_descr_ref descr, *descr_tail;
17042 unsigned HOST_WIDE_INT decl_size;
17043 rtx varloc;
17044 enum var_init_status initialized;
17045
17046 if (DECL_SIZE (decl) == NULL
17047 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
17048 return NULL;
17049
17050 decl_size = tree_to_uhwi (DECL_SIZE (decl));
17051 descr = NULL;
17052 descr_tail = &descr;
17053
17054 for (p = loc; p; p = XEXP (p, 1))
17055 {
17056 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
17057 rtx loc_note = *decl_piece_varloc_ptr (p);
17058 dw_loc_descr_ref cur_descr;
17059 dw_loc_descr_ref *tail, last = NULL;
17060 unsigned HOST_WIDE_INT opsize = 0;
17061
17062 if (loc_note == NULL_RTX
17063 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
17064 {
17065 padsize += bitsize;
17066 continue;
17067 }
17068 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
17069 varloc = NOTE_VAR_LOCATION (loc_note);
17070 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
17071 if (cur_descr == NULL)
17072 {
17073 padsize += bitsize;
17074 continue;
17075 }
17076
17077 /* Check that cur_descr either doesn't use
17078 DW_OP_*piece operations, or their sum is equal
17079 to bitsize. Otherwise we can't embed it. */
17080 for (tail = &cur_descr; *tail != NULL;
17081 tail = &(*tail)->dw_loc_next)
17082 if ((*tail)->dw_loc_opc == DW_OP_piece)
17083 {
17084 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
17085 * BITS_PER_UNIT;
17086 last = *tail;
17087 }
17088 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
17089 {
17090 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
17091 last = *tail;
17092 }
17093
17094 if (last != NULL && opsize != bitsize)
17095 {
17096 padsize += bitsize;
17097 /* Discard the current piece of the descriptor and release any
17098 addr_table entries it uses. */
17099 remove_loc_list_addr_table_entries (cur_descr);
17100 continue;
17101 }
17102
17103 /* If there is a hole, add a DW_OP_*piece after an empty DWARF
17104 expression, which means that those bits are optimized out. */
17105 if (padsize)
17106 {
17107 if (padsize > decl_size)
17108 {
17109 remove_loc_list_addr_table_entries (cur_descr);
17110 goto discard_descr;
17111 }
17112 decl_size -= padsize;
17113 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
17114 if (*descr_tail == NULL)
17115 {
17116 remove_loc_list_addr_table_entries (cur_descr);
17117 goto discard_descr;
17118 }
17119 descr_tail = &(*descr_tail)->dw_loc_next;
17120 padsize = 0;
17121 }
17122 *descr_tail = cur_descr;
17123 descr_tail = tail;
17124 if (bitsize > decl_size)
17125 goto discard_descr;
17126 decl_size -= bitsize;
17127 if (last == NULL)
17128 {
17129 HOST_WIDE_INT offset = 0;
17130 if (GET_CODE (varloc) == VAR_LOCATION
17131 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
17132 {
17133 varloc = PAT_VAR_LOCATION_LOC (varloc);
17134 if (GET_CODE (varloc) == EXPR_LIST)
17135 varloc = XEXP (varloc, 0);
17136 }
17137 do
17138 {
17139 if (GET_CODE (varloc) == CONST
17140 || GET_CODE (varloc) == SIGN_EXTEND
17141 || GET_CODE (varloc) == ZERO_EXTEND)
17142 varloc = XEXP (varloc, 0);
17143 else if (GET_CODE (varloc) == SUBREG)
17144 varloc = SUBREG_REG (varloc);
17145 else
17146 break;
17147 }
17148 while (1);
17149 /* The DW_OP_bit_piece offset should be zero for register
17150 or implicit location descriptions and empty location
17151 descriptions, but for memory addresses it needs a big endian
17152 adjustment. */
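/* Worked example, illustrative only: for a 16-bit piece read out of a
   4-byte memory slot, memsize is 32 bits; on a BITS_BIG_ENDIAN target the
   piece offset becomes 32 - 16 == 16, while on a little-endian target it
   stays 0.  */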
17153 if (MEM_P (varloc))
17154 {
17155 unsigned HOST_WIDE_INT memsize;
17156 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
17157 goto discard_descr;
17158 memsize *= BITS_PER_UNIT;
17159 if (memsize != bitsize)
17160 {
17161 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
17162 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
17163 goto discard_descr;
17164 if (memsize < bitsize)
17165 goto discard_descr;
17166 if (BITS_BIG_ENDIAN)
17167 offset = memsize - bitsize;
17168 }
17169 }
17170
17171 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17172 if (*descr_tail == NULL)
17173 goto discard_descr;
17174 descr_tail = &(*descr_tail)->dw_loc_next;
17175 }
17176 }
17177
17178 /* If there were any non-empty expressions, add padding till the end of
17179 the decl. */
17180 if (descr != NULL && decl_size != 0)
17181 {
17182 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17183 if (*descr_tail == NULL)
17184 goto discard_descr;
17185 }
17186 return descr;
17187
17188 discard_descr:
17189 /* Discard the descriptor and release any addr_table entries it uses. */
17190 remove_loc_list_addr_table_entries (descr);
17191 return NULL;
17192 }
17193
17194 /* Return the dwarf representation of the location list LOC_LIST of
17195 DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree
17196 function. */
17197
17198 static dw_loc_list_ref
17199 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17200 {
17201 const char *endname, *secname;
17202 var_loc_view endview;
17203 rtx varloc;
17204 enum var_init_status initialized;
17205 struct var_loc_node *node;
17206 dw_loc_descr_ref descr;
17207 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17208 dw_loc_list_ref list = NULL;
17209 dw_loc_list_ref *listp = &list;
17210
17211 /* Now that we know what section we are using for a base,
17212 actually construct the list of locations.
17213 The first location information is what is passed to the
17214 function that creates the location list, and the remaining
17215 locations just get added on to that list.
17216 Note that we only know the start address for a location
17217 (i.e. where the location changes), so to build the range, we use
17218 the range [current location start, next location start].
17219 This means we have to special case the last node, and generate
17220 a range of [last location start, end of function label]. */
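/* For illustration only (the label names are made up): if a variable's
   location changes at .LVL1 and again at .LVL2 inside a function that ends
   at .LFE0, the list built below contains the ranges [.LVL1, .LVL2) and
   [.LVL2, .LFE0), the last range being closed with the end-of-function
   label as described above.  */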
17221
17222 if (cfun && crtl->has_bb_partition)
17223 {
17224 bool save_in_cold_section_p = in_cold_section_p;
17225 in_cold_section_p = first_function_block_is_cold;
17226 if (loc_list->last_before_switch == NULL)
17227 in_cold_section_p = !in_cold_section_p;
17228 secname = secname_for_decl (decl);
17229 in_cold_section_p = save_in_cold_section_p;
17230 }
17231 else
17232 secname = secname_for_decl (decl);
17233
17234 for (node = loc_list->first; node; node = node->next)
17235 {
17236 bool range_across_switch = false;
17237 if (GET_CODE (node->loc) == EXPR_LIST
17238 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17239 {
17240 if (GET_CODE (node->loc) == EXPR_LIST)
17241 {
17242 descr = NULL;
17243 /* This requires DW_OP_{,bit_}piece, which is not usable
17244 inside DWARF expressions. */
17245 if (want_address == 2)
17246 descr = dw_sra_loc_expr (decl, node->loc);
17247 }
17248 else
17249 {
17250 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17251 varloc = NOTE_VAR_LOCATION (node->loc);
17252 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17253 }
17254 if (descr)
17255 {
17256 /* If a section switch happens in between node->label
17257 and node->next->label (or the end of the function) and
17258 we can't emit it as a single entry list,
17259 emit two ranges: the first one ending at the end
17260 of the first partition and the second one starting at the
17261 beginning of the second partition. */
17262 if (node == loc_list->last_before_switch
17263 && (node != loc_list->first || loc_list->first->next
17264 /* If we are to emit a view number, we will emit
17265 a loclist rather than a single location
17266 expression for the entire function (see
17267 loc_list_has_views), so we have to split the
17268 range that straddles across partitions. */
17269 || !ZERO_VIEW_P (node->view))
17270 && current_function_decl)
17271 {
17272 endname = cfun->fde->dw_fde_end;
17273 endview = 0;
17274 range_across_switch = true;
17275 }
17276 /* The variable has a location between NODE->LABEL and
17277 NODE->NEXT->LABEL. */
17278 else if (node->next)
17279 endname = node->next->label, endview = node->next->view;
17280 /* If the variable has a location at the last label
17281 it keeps its location until the end of function. */
17282 else if (!current_function_decl)
17283 endname = text_end_label, endview = 0;
17284 else
17285 {
17286 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17287 current_function_funcdef_no);
17288 endname = ggc_strdup (label_id);
17289 endview = 0;
17290 }
17291
17292 *listp = new_loc_list (descr, node->label, node->view,
17293 endname, endview, secname);
17294 if (TREE_CODE (decl) == PARM_DECL
17295 && node == loc_list->first
17296 && NOTE_P (node->loc)
17297 && strcmp (node->label, endname) == 0)
17298 (*listp)->force = true;
17299 listp = &(*listp)->dw_loc_next;
17300 }
17301 }
17302
17303 if (cfun
17304 && crtl->has_bb_partition
17305 && node == loc_list->last_before_switch)
17306 {
17307 bool save_in_cold_section_p = in_cold_section_p;
17308 in_cold_section_p = !first_function_block_is_cold;
17309 secname = secname_for_decl (decl);
17310 in_cold_section_p = save_in_cold_section_p;
17311 }
17312
17313 if (range_across_switch)
17314 {
17315 if (GET_CODE (node->loc) == EXPR_LIST)
17316 descr = dw_sra_loc_expr (decl, node->loc);
17317 else
17318 {
17319 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17320 varloc = NOTE_VAR_LOCATION (node->loc);
17321 descr = dw_loc_list_1 (decl, varloc, want_address,
17322 initialized);
17323 }
17324 gcc_assert (descr);
17325 /* The variable has a location between NODE->LABEL and
17326 NODE->NEXT->LABEL. */
17327 if (node->next)
17328 endname = node->next->label, endview = node->next->view;
17329 else
17330 endname = cfun->fde->dw_fde_second_end, endview = 0;
17331 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17332 endname, endview, secname);
17333 listp = &(*listp)->dw_loc_next;
17334 }
17335 }
17336
17337 /* Try to avoid the overhead of a location list by emitting a location
17338 expression instead, but only if we didn't have more than one
17339 location entry in the first place. If some entries were not
17340 representable, we don't want to pretend that a single entry that
17341 was representable applies to the entire scope in which the
17342 variable is available. */
17343 if (list && loc_list->first->next)
17344 gen_llsym (list);
17345 else
17346 maybe_gen_llsym (list);
17347
17348 return list;
17349 }
17350
17351 /* Return true if the loc_list has only a single element and thus can be
17352 represented as a location description. */
17353
17354 static bool
17355 single_element_loc_list_p (dw_loc_list_ref list)
17356 {
17357 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17358 return !list->ll_symbol;
17359 }
17360
17361 /* Duplicate a single element of location list. */
17362
17363 static inline dw_loc_descr_ref
17364 copy_loc_descr (dw_loc_descr_ref ref)
17365 {
17366 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17367 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17368 return copy;
17369 }
17370
17371 /* To each location in list LIST append loc descr REF. */
17372
17373 static void
17374 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17375 {
17376 dw_loc_descr_ref copy;
17377 add_loc_descr (&list->expr, ref);
17378 list = list->dw_loc_next;
17379 while (list)
17380 {
17381 copy = copy_loc_descr (ref);
17382 add_loc_descr (&list->expr, copy);
17383 while (copy->dw_loc_next)
17384 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17385 list = list->dw_loc_next;
17386 }
17387 }
17388
17389 /* To each location in list LIST prepend loc descr REF. */
17390
17391 static void
17392 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17393 {
17394 dw_loc_descr_ref copy;
17395 dw_loc_descr_ref ref_end = list->expr;
17396 add_loc_descr (&ref, list->expr);
17397 list->expr = ref;
17398 list = list->dw_loc_next;
17399 while (list)
17400 {
17401 dw_loc_descr_ref end = list->expr;
17402 list->expr = copy = copy_loc_descr (ref);
17403 while (copy->dw_loc_next != ref_end)
17404 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17405 copy->dw_loc_next = end;
17406 list = list->dw_loc_next;
17407 }
17408 }
17409
17410 /* Given two lists RET and LIST
17411 produce location list that is result of adding expression in LIST
17412 to expression in RET on each position in program.
17413 Might be destructive on both RET and LIST.
17414
17415 TODO: We handle only simple cases of RET or LIST having at most one
17416 element. The general case would involve sorting the lists in program order
17417 and merging them, which will need some additional work.
17418 Adding that will improve the quality of debug info, especially for SRA-ed
17419 structures. */
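/* Minimal example, not from the original sources: if *RET describes a
   pointer as the single-entry list { DW_OP_breg6 -16 } and LIST is the
   single-entry list { DW_OP_deref }, the merged result is the single-entry
   list { DW_OP_breg6 -16; DW_OP_deref }, i.e. LIST's expression is appended
   to RET's expression over each program range.  */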
17420
17421 static void
17422 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17423 {
17424 if (!list)
17425 return;
17426 if (!*ret)
17427 {
17428 *ret = list;
17429 return;
17430 }
17431 if (!list->dw_loc_next)
17432 {
17433 add_loc_descr_to_each (*ret, list->expr);
17434 return;
17435 }
17436 if (!(*ret)->dw_loc_next)
17437 {
17438 prepend_loc_descr_to_each (list, (*ret)->expr);
17439 *ret = list;
17440 return;
17441 }
17442 expansion_failed (NULL_TREE, NULL_RTX,
17443 "Don't know how to merge two non-trivial"
17444 " location lists.\n");
17445 *ret = NULL;
17446 return;
17447 }
17448
17449 /* LOC is a constant expression. Try our luck: look it up in the constant
17450 pool and return the loc_descr of its address. */
17451
17452 static dw_loc_descr_ref
17453 cst_pool_loc_descr (tree loc)
17454 {
17455 /* Get an RTL for this, if something has been emitted. */
17456 rtx rtl = lookup_constant_def (loc);
17457
17458 if (!rtl || !MEM_P (rtl))
17459 {
17460 gcc_assert (!rtl);
17461 return 0;
17462 }
17463 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17464
17465 /* TODO: We might get more coverage if we were actually delaying expansion
17466 of all expressions till the end of compilation, when constant pools are fully
17467 populated. */
17468 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17469 {
17470 expansion_failed (loc, NULL_RTX,
17471 "CST value in constant pool but not marked.");
17472 return 0;
17473 }
17474 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17475 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17476 }
17477
17478 /* Return a dw_loc_list representing the address of the ADDR_EXPR LOC
17479 by looking for an inner INDIRECT_REF expression and turning
17480 it into simple arithmetic.
17481
17482 See loc_list_from_tree for the meaning of CONTEXT. */
17483
17484 static dw_loc_list_ref
17485 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17486 loc_descr_context *context)
17487 {
17488 tree obj, offset;
17489 poly_int64 bitsize, bitpos, bytepos;
17490 machine_mode mode;
17491 int unsignedp, reversep, volatilep = 0;
17492 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17493
17494 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17495 &bitsize, &bitpos, &offset, &mode,
17496 &unsignedp, &reversep, &volatilep);
17497 STRIP_NOPS (obj);
17498 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17499 {
17500 expansion_failed (loc, NULL_RTX, "bitfield access");
17501 return 0;
17502 }
17503 if (!INDIRECT_REF_P (obj))
17504 {
17505 expansion_failed (obj,
17506 NULL_RTX, "no indirect ref in inner reference");
17507 return 0;
17508 }
17509 if (!offset && known_eq (bitpos, 0))
17510 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17511 context);
17512 else if (toplev
17513 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17514 && (dwarf_version >= 4 || !dwarf_strict))
17515 {
17516 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17517 if (!list_ret)
17518 return 0;
17519 if (offset)
17520 {
17521 /* Variable offset. */
17522 list_ret1 = loc_list_from_tree (offset, 0, context);
17523 if (list_ret1 == 0)
17524 return 0;
17525 add_loc_list (&list_ret, list_ret1);
17526 if (!list_ret)
17527 return 0;
17528 add_loc_descr_to_each (list_ret,
17529 new_loc_descr (DW_OP_plus, 0, 0));
17530 }
17531 HOST_WIDE_INT value;
17532 if (bytepos.is_constant (&value) && value > 0)
17533 add_loc_descr_to_each (list_ret,
17534 new_loc_descr (DW_OP_plus_uconst, value, 0));
17535 else if (maybe_ne (bytepos, 0))
17536 loc_list_plus_const (list_ret, bytepos);
17537 add_loc_descr_to_each (list_ret,
17538 new_loc_descr (DW_OP_stack_value, 0, 0));
17539 }
17540 return list_ret;
17541 }
17542
17543 /* Set LOC to the next operation that is not a DW_OP_nop operation.  If
17544 all operations from LOC are nops, move to the last one.  Insert into NOPS all
17545 operations that are skipped.  */
17546
17547 static void
17548 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17549 hash_set<dw_loc_descr_ref> &nops)
17550 {
17551 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17552 {
17553 nops.add (loc);
17554 loc = loc->dw_loc_next;
17555 }
17556 }
17557
17558 /* Helper for loc_descr_without_nops: free the location description operation
17559 P. */
17560
17561 bool
17562 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17563 {
17564 ggc_free (loc);
17565 return true;
17566 }
17567
17568 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17569 finishes LOC. */
17570
17571 static void
17572 loc_descr_without_nops (dw_loc_descr_ref &loc)
17573 {
17574 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17575 return;
17576
17577 /* Set of all DW_OP_nop operations we remove. */
17578 hash_set<dw_loc_descr_ref> nops;
17579
17580 /* First, strip all prefix NOP operations in order to keep the head of the
17581 operations list. */
17582 loc_descr_to_next_no_nop (loc, nops);
17583
17584 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17585 {
17586 /* For control flow operations: strip "prefix" nops in destination
17587 labels. */
17588 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17589 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17590 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17591 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17592
17593 /* Do the same for the operations that follow, then move to the next
17594 iteration. */
17595 if (cur->dw_loc_next != NULL)
17596 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17597 cur = cur->dw_loc_next;
17598 }
17599
17600 nops.traverse<void *, free_loc_descr> (NULL);
17601 }
17602
17603
17604 struct dwarf_procedure_info;
17605
17606 /* Helper structure for location descriptions generation. */
17607 struct loc_descr_context
17608 {
17609 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17610 NULL_TREE if DW_OP_push_object_address is invalid for this location
17611 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17612 tree context_type;
17613 /* The ..._DECL node that should be translated as a
17614 DW_OP_push_object_address operation. */
17615 tree base_decl;
17616 /* Information about the DWARF procedure we are currently generating. NULL if
17617 we are not generating a DWARF procedure. */
17618 struct dwarf_procedure_info *dpi;
17619 /* True if an integral PLACEHOLDER_EXPR stands for the first argument passed
17620 by the consumer.  Used for DW_TAG_generic_subrange attributes.  */
17621 bool placeholder_arg;
17622 /* True if PLACEHOLDER_EXPR has been seen. */
17623 bool placeholder_seen;
17624 };
17625
17626 /* DWARF procedures generation
17627
17628 DWARF expressions (aka. location descriptions) are used to encode variable
17629 quantities such as sizes or offsets.  Such computations can have redundant parts
17630 that can be factorized in order to reduce the size of the output debug
17631 information. This is the whole point of DWARF procedures.
17632
17633 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17634 already factorized into functions ("size functions") in order to handle very
17635 big and complex types. Such functions are quite simple: they have integral
17636 arguments, they return an integral result and their body contains only a
17637 return statement with arithmetic expressions. This is the only kind of
17638 function we are interested in translating into DWARF procedures, here.
17639
17640 DWARF expressions and DWARF procedures are executed using a stack, so we have
17641 to define some calling convention for them to interact. Let's say that:
17642
17643 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17644 all arguments in reverse order (right-to-left) so that when the DWARF
17645 procedure execution starts, the first argument is the top of the stack.
17646
17647 - Then, when returning, the DWARF procedure must have consumed all arguments
17648 on the stack, must have pushed the result and touched nothing else.
17649
17650 - Each integral argument and the result have integral types and can be held
17651 in a single stack slot.
17652
17653 - We call "frame offset" the number of stack slots that are "under DWARF
17654 procedure control": it includes the argument slots, the temporaries and
17655 the result slot. Thus, it is equal to the number of arguments when the
17656 procedure execution starts and must be equal to one (the result) when it
17657 returns. */
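
/* As an illustration (a hypothetical procedure, not a literal excerpt of what
   the code below emits): a DWARF procedure computing ARG0 + 2 * ARG1 and a
   call to it could look like

     DW_TAG_dwarf_procedure:
       DW_AT_location: DW_OP_swap, DW_OP_lit2, DW_OP_mul, DW_OP_plus

     caller expression: <push ARG1>, <push ARG0>, DW_OP_call4 <procedure DIE>

   On entry the frame offset is 2 (the two argument slots); after DW_OP_plus it
   is 1 (the result slot), as the convention above requires.  */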
17658
17659 /* Helper structure used when generating operations for a DWARF procedure. */
17660 struct dwarf_procedure_info
17661 {
17662 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17663 currently translated. */
17664 tree fndecl;
17665 /* The number of arguments FNDECL takes. */
17666 unsigned args_count;
17667 };
17668
17669 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17670 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17671 equate it to this DIE. */
17672
17673 static dw_die_ref
17674 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17675 dw_die_ref parent_die)
17676 {
17677 dw_die_ref dwarf_proc_die;
17678
17679 if ((dwarf_version < 3 && dwarf_strict)
17680 || location == NULL)
17681 return NULL;
17682
17683 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17684 if (fndecl)
17685 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17686 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17687 return dwarf_proc_die;
17688 }
17689
17690 /* Return whether TYPE is a supported type as a DWARF procedure argument
17691 type or return type (we handle only scalar types and pointer types that
17692 aren't wider than the DWARF expression evaluation stack).  */
17693
17694 static bool
17695 is_handled_procedure_type (tree type)
17696 {
17697 return ((INTEGRAL_TYPE_P (type)
17698 || TREE_CODE (type) == OFFSET_TYPE
17699 || TREE_CODE (type) == POINTER_TYPE)
17700 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17701 }
17702
17703 /* Helper for resolve_args_picking: do the same but stop when coming across
17704 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17705 offset *before* evaluating the corresponding operation. */
17706
17707 static bool
17708 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17709 struct dwarf_procedure_info *dpi,
17710 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17711 {
17712 /* The "frame_offset" identifier is already used to name a macro... */
17713 unsigned frame_offset_ = initial_frame_offset;
17714 dw_loc_descr_ref l;
17715
17716 for (l = loc; l != NULL;)
17717 {
17718 bool existed;
17719 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17720
17721 /* If we have already met this node, there is nothing left to compute.  */
17722 if (existed)
17723 {
17724 /* Make sure that the stack size is consistent wherever the execution
17725 flow comes from. */
17726 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17727 break;
17728 }
17729 l_frame_offset = frame_offset_;
17730
17731 /* If needed, relocate the picking offset with respect to the frame
17732 offset. */
17733 if (l->frame_offset_rel)
17734 {
17735 unsigned HOST_WIDE_INT off;
17736 switch (l->dw_loc_opc)
17737 {
17738 case DW_OP_pick:
17739 off = l->dw_loc_oprnd1.v.val_unsigned;
17740 break;
17741 case DW_OP_dup:
17742 off = 0;
17743 break;
17744 case DW_OP_over:
17745 off = 1;
17746 break;
17747 default:
17748 gcc_unreachable ();
17749 }
17750 /* frame_offset_ is the size of the current stack frame, including
17751 incoming arguments. Besides, the arguments are pushed
17752 right-to-left. Thus, in order to access the Nth argument from
17753 this operation node, the picking has to skip temporaries *plus*
17754 one stack slot per argument (0 for the first one, 1 for the second
17755 one, etc.).
17756
17757 The targeted argument number (N) is already set as the operand,
17758 and the number of temporaries can be computed with:
17759 frame_offset_ - dpi->args_count */
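	      /* A worked example with made-up numbers: with dpi->args_count == 3
	         and frame_offset_ == 5 (two temporaries above the arguments),
	         accessing argument N == 1 gives off = 1 + (5 - 3) = 3, i.e. the
	         operation becomes DW_OP_pick 3.  */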
17760 off += frame_offset_ - dpi->args_count;
17761
17762 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17763 if (off > 255)
17764 return false;
17765
17766 if (off == 0)
17767 {
17768 l->dw_loc_opc = DW_OP_dup;
17769 l->dw_loc_oprnd1.v.val_unsigned = 0;
17770 }
17771 else if (off == 1)
17772 {
17773 l->dw_loc_opc = DW_OP_over;
17774 l->dw_loc_oprnd1.v.val_unsigned = 0;
17775 }
17776 else
17777 {
17778 l->dw_loc_opc = DW_OP_pick;
17779 l->dw_loc_oprnd1.v.val_unsigned = off;
17780 }
17781 }
17782
17783 /* Update frame_offset according to the effect the current operation has
17784 on the stack. */
17785 switch (l->dw_loc_opc)
17786 {
17787 case DW_OP_deref:
17788 case DW_OP_swap:
17789 case DW_OP_rot:
17790 case DW_OP_abs:
17791 case DW_OP_neg:
17792 case DW_OP_not:
17793 case DW_OP_plus_uconst:
17794 case DW_OP_skip:
17795 case DW_OP_reg0:
17796 case DW_OP_reg1:
17797 case DW_OP_reg2:
17798 case DW_OP_reg3:
17799 case DW_OP_reg4:
17800 case DW_OP_reg5:
17801 case DW_OP_reg6:
17802 case DW_OP_reg7:
17803 case DW_OP_reg8:
17804 case DW_OP_reg9:
17805 case DW_OP_reg10:
17806 case DW_OP_reg11:
17807 case DW_OP_reg12:
17808 case DW_OP_reg13:
17809 case DW_OP_reg14:
17810 case DW_OP_reg15:
17811 case DW_OP_reg16:
17812 case DW_OP_reg17:
17813 case DW_OP_reg18:
17814 case DW_OP_reg19:
17815 case DW_OP_reg20:
17816 case DW_OP_reg21:
17817 case DW_OP_reg22:
17818 case DW_OP_reg23:
17819 case DW_OP_reg24:
17820 case DW_OP_reg25:
17821 case DW_OP_reg26:
17822 case DW_OP_reg27:
17823 case DW_OP_reg28:
17824 case DW_OP_reg29:
17825 case DW_OP_reg30:
17826 case DW_OP_reg31:
17827 case DW_OP_bregx:
17828 case DW_OP_piece:
17829 case DW_OP_deref_size:
17830 case DW_OP_nop:
17831 case DW_OP_bit_piece:
17832 case DW_OP_implicit_value:
17833 case DW_OP_stack_value:
17834 break;
17835
17836 case DW_OP_addr:
17837 case DW_OP_const1u:
17838 case DW_OP_const1s:
17839 case DW_OP_const2u:
17840 case DW_OP_const2s:
17841 case DW_OP_const4u:
17842 case DW_OP_const4s:
17843 case DW_OP_const8u:
17844 case DW_OP_const8s:
17845 case DW_OP_constu:
17846 case DW_OP_consts:
17847 case DW_OP_dup:
17848 case DW_OP_over:
17849 case DW_OP_pick:
17850 case DW_OP_lit0:
17851 case DW_OP_lit1:
17852 case DW_OP_lit2:
17853 case DW_OP_lit3:
17854 case DW_OP_lit4:
17855 case DW_OP_lit5:
17856 case DW_OP_lit6:
17857 case DW_OP_lit7:
17858 case DW_OP_lit8:
17859 case DW_OP_lit9:
17860 case DW_OP_lit10:
17861 case DW_OP_lit11:
17862 case DW_OP_lit12:
17863 case DW_OP_lit13:
17864 case DW_OP_lit14:
17865 case DW_OP_lit15:
17866 case DW_OP_lit16:
17867 case DW_OP_lit17:
17868 case DW_OP_lit18:
17869 case DW_OP_lit19:
17870 case DW_OP_lit20:
17871 case DW_OP_lit21:
17872 case DW_OP_lit22:
17873 case DW_OP_lit23:
17874 case DW_OP_lit24:
17875 case DW_OP_lit25:
17876 case DW_OP_lit26:
17877 case DW_OP_lit27:
17878 case DW_OP_lit28:
17879 case DW_OP_lit29:
17880 case DW_OP_lit30:
17881 case DW_OP_lit31:
17882 case DW_OP_breg0:
17883 case DW_OP_breg1:
17884 case DW_OP_breg2:
17885 case DW_OP_breg3:
17886 case DW_OP_breg4:
17887 case DW_OP_breg5:
17888 case DW_OP_breg6:
17889 case DW_OP_breg7:
17890 case DW_OP_breg8:
17891 case DW_OP_breg9:
17892 case DW_OP_breg10:
17893 case DW_OP_breg11:
17894 case DW_OP_breg12:
17895 case DW_OP_breg13:
17896 case DW_OP_breg14:
17897 case DW_OP_breg15:
17898 case DW_OP_breg16:
17899 case DW_OP_breg17:
17900 case DW_OP_breg18:
17901 case DW_OP_breg19:
17902 case DW_OP_breg20:
17903 case DW_OP_breg21:
17904 case DW_OP_breg22:
17905 case DW_OP_breg23:
17906 case DW_OP_breg24:
17907 case DW_OP_breg25:
17908 case DW_OP_breg26:
17909 case DW_OP_breg27:
17910 case DW_OP_breg28:
17911 case DW_OP_breg29:
17912 case DW_OP_breg30:
17913 case DW_OP_breg31:
17914 case DW_OP_fbreg:
17915 case DW_OP_push_object_address:
17916 case DW_OP_call_frame_cfa:
17917 case DW_OP_GNU_variable_value:
17918 case DW_OP_GNU_addr_index:
17919 case DW_OP_GNU_const_index:
17920 ++frame_offset_;
17921 break;
17922
17923 case DW_OP_drop:
17924 case DW_OP_xderef:
17925 case DW_OP_and:
17926 case DW_OP_div:
17927 case DW_OP_minus:
17928 case DW_OP_mod:
17929 case DW_OP_mul:
17930 case DW_OP_or:
17931 case DW_OP_plus:
17932 case DW_OP_shl:
17933 case DW_OP_shr:
17934 case DW_OP_shra:
17935 case DW_OP_xor:
17936 case DW_OP_bra:
17937 case DW_OP_eq:
17938 case DW_OP_ge:
17939 case DW_OP_gt:
17940 case DW_OP_le:
17941 case DW_OP_lt:
17942 case DW_OP_ne:
17943 case DW_OP_regx:
17944 case DW_OP_xderef_size:
17945 --frame_offset_;
17946 break;
17947
17948 case DW_OP_call2:
17949 case DW_OP_call4:
17950 case DW_OP_call_ref:
17951 {
17952 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17953 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17954
17955 if (stack_usage == NULL)
17956 return false;
17957 frame_offset_ += *stack_usage;
17958 break;
17959 }
17960
17961 case DW_OP_implicit_pointer:
17962 case DW_OP_entry_value:
17963 case DW_OP_const_type:
17964 case DW_OP_regval_type:
17965 case DW_OP_deref_type:
17966 case DW_OP_convert:
17967 case DW_OP_reinterpret:
17968 case DW_OP_form_tls_address:
17969 case DW_OP_GNU_push_tls_address:
17970 case DW_OP_GNU_uninit:
17971 case DW_OP_GNU_encoded_addr:
17972 case DW_OP_GNU_implicit_pointer:
17973 case DW_OP_GNU_entry_value:
17974 case DW_OP_GNU_const_type:
17975 case DW_OP_GNU_regval_type:
17976 case DW_OP_GNU_deref_type:
17977 case DW_OP_GNU_convert:
17978 case DW_OP_GNU_reinterpret:
17979 case DW_OP_GNU_parameter_ref:
17980 /* loc_list_from_tree will probably not output these operations for
17981 size functions, so assume they will not appear here. */
17982 /* Fall through... */
17983
17984 default:
17985 gcc_unreachable ();
17986 }
17987
17988 /* Now, follow the control flow (except subroutine calls). */
17989 switch (l->dw_loc_opc)
17990 {
17991 case DW_OP_bra:
17992 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17993 frame_offsets))
17994 return false;
17995 /* Fall through. */
17996
17997 case DW_OP_skip:
17998 l = l->dw_loc_oprnd1.v.val_loc;
17999 break;
18000
18001 case DW_OP_stack_value:
18002 return true;
18003
18004 default:
18005 l = l->dw_loc_next;
18006 break;
18007 }
18008 }
18009
18010 return true;
18011 }
18012
18013 /* Make a DFS over operations reachable through LOC (i.e. follow branch
18014 operations) in order to resolve the operand of DW_OP_pick operations that
18015 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
18016 offset *before* LOC is executed.  Return whether all relocations were
18017 successful. */
18018
18019 static bool
18020 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
18021 struct dwarf_procedure_info *dpi)
18022 {
18023 /* Associate to all visited operations the frame offset *before* evaluating
18024 this operation. */
18025 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
18026
18027 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
18028 frame_offsets);
18029 }
18030
18031 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
18032 Return NULL if it is not possible. */
18033
18034 static dw_die_ref
18035 function_to_dwarf_procedure (tree fndecl)
18036 {
18037 struct loc_descr_context ctx;
18038 struct dwarf_procedure_info dpi;
18039 dw_die_ref dwarf_proc_die;
18040 tree tree_body = DECL_SAVED_TREE (fndecl);
18041 dw_loc_descr_ref loc_body, epilogue;
18042
18043 tree cursor;
18044 unsigned i;
18045
18046 /* Do not generate multiple DWARF procedures for the same function
18047 declaration. */
18048 dwarf_proc_die = lookup_decl_die (fndecl);
18049 if (dwarf_proc_die != NULL)
18050 return dwarf_proc_die;
18051
18052 /* DWARF procedures are available starting with the DWARFv3 standard. */
18053 if (dwarf_version < 3 && dwarf_strict)
18054 return NULL;
18055
18056 /* We handle only functions for which we still have a body, that return a
18057 supported type and that take arguments with supported types.  Note that
18058 there is no point translating functions that return nothing. */
18059 if (tree_body == NULL_TREE
18060 || DECL_RESULT (fndecl) == NULL_TREE
18061 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
18062 return NULL;
18063
18064 for (cursor = DECL_ARGUMENTS (fndecl);
18065 cursor != NULL_TREE;
18066 cursor = TREE_CHAIN (cursor))
18067 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
18068 return NULL;
18069
18070 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
18071 if (TREE_CODE (tree_body) != RETURN_EXPR)
18072 return NULL;
18073 tree_body = TREE_OPERAND (tree_body, 0);
18074 if (TREE_CODE (tree_body) != MODIFY_EXPR
18075 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
18076 return NULL;
18077 tree_body = TREE_OPERAND (tree_body, 1);
18078
18079 /* Try to translate the body expression itself. Note that this will probably
18080 cause an infinite recursion if its call graph has a cycle. This is very
18081 unlikely for size functions, however, so don't bother with such things at
18082 the moment. */
18083 ctx.context_type = NULL_TREE;
18084 ctx.base_decl = NULL_TREE;
18085 ctx.dpi = &dpi;
18086 ctx.placeholder_arg = false;
18087 ctx.placeholder_seen = false;
18088 dpi.fndecl = fndecl;
18089 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
18090 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
18091 if (!loc_body)
18092 return NULL;
18093
18094 /* After evaluating all operands in "loc_body", we should still have on the
18095 stack all arguments plus the desired function result (top of the stack).
18096 Generate code in order to keep only the result in our stack frame. */
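   /* For example, for a two-argument procedure the loop below builds the
      epilogue DW_OP_swap, DW_OP_drop, DW_OP_swap, DW_OP_drop, which pops both
      arguments while leaving the result on top of the stack.  */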
18097 epilogue = NULL;
18098 for (i = 0; i < dpi.args_count; ++i)
18099 {
18100 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
18101 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
18102 op_couple->dw_loc_next->dw_loc_next = epilogue;
18103 epilogue = op_couple;
18104 }
18105 add_loc_descr (&loc_body, epilogue);
18106 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
18107 return NULL;
18108
18109 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
18110 because they are considered useful.  Now that there is an epilogue, they
18111 no longer are, so give it another try.  */
18112 loc_descr_without_nops (loc_body);
18113
18114 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
18115 a DW_TAG_dwarf_procedure, so we may have a conflict here.  It's unlikely,
18116 though, given that size functions do not come from source, so they should
18117 not have a dedicated DW_TAG_subprogram DIE. */
18118 dwarf_proc_die
18119 = new_dwarf_proc_die (loc_body, fndecl,
18120 get_context_die (DECL_CONTEXT (fndecl)));
18121
18122 /* The called DWARF procedure consumes one stack slot per argument and
18123 returns one stack slot. */
18124 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
18125
18126 return dwarf_proc_die;
18127 }
18128
18129
18130 /* Generate a DWARF location list representing LOC.
18131 If WANT_ADDRESS is 0, an expression computing the value of LOC will be returned.
18132 If WANT_ADDRESS is 1, an expression computing the address of LOC will be returned.
18133 If WANT_ADDRESS is 2, an expression computing an address usable in a location
18134 will be returned (i.e. DW_OP_reg can be used
18135 to refer to register values).
18136
18137 CONTEXT provides information to customize the location descriptions
18138 generation. Its context_type field specifies what type is implicitly
18139 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
18140 will not be generated.
18141
18142 Its DPI field determines whether we are generating a DWARF expression for a
18143 DWARF procedure, in which case PARM_DECL references are processed specially.
18144
18145 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
18146 and dpi fields were null. */
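
/* As a rough illustration (hypothetical storage, not a guaranteed encoding):
   for a local variable living in memory at frame offset -8, WANT_ADDRESS == 1
   would typically yield something like DW_OP_fbreg -8, while WANT_ADDRESS == 0
   would then append a DW_OP_deref (or DW_OP_deref_size for narrower objects)
   to load the value itself.  */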
18147
18148 static dw_loc_list_ref
18149 loc_list_from_tree_1 (tree loc, int want_address,
18150 struct loc_descr_context *context)
18151 {
18152 dw_loc_descr_ref ret = NULL, ret1 = NULL;
18153 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
18154 int have_address = 0;
18155 enum dwarf_location_atom op;
18156
18157 /* ??? Most of the time we do not take proper care to sign/zero
18158 extend the values.  Hopefully this won't be a real
18159 problem... */
18160
18161 if (context != NULL
18162 && context->base_decl == loc
18163 && want_address == 0)
18164 {
18165 if (dwarf_version >= 3 || !dwarf_strict)
18166 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
18167 NULL, 0, NULL, 0, NULL);
18168 else
18169 return NULL;
18170 }
18171
18172 switch (TREE_CODE (loc))
18173 {
18174 case ERROR_MARK:
18175 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18176 return 0;
18177
18178 case PLACEHOLDER_EXPR:
18179 /* This case involves extracting fields from an object to determine the
18180 position of other fields. It is supposed to appear only as the first
18181 operand of COMPONENT_REF nodes and to reference precisely the type
18182 that the context allows. */
18183 if (context != NULL
18184 && TREE_TYPE (loc) == context->context_type
18185 && want_address >= 1)
18186 {
18187 if (dwarf_version >= 3 || !dwarf_strict)
18188 {
18189 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18190 have_address = 1;
18191 break;
18192 }
18193 else
18194 return NULL;
18195 }
18196 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18197 the single argument passed by the consumer.  */
18198 else if (context != NULL
18199 && context->placeholder_arg
18200 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18201 && want_address == 0)
18202 {
18203 ret = new_loc_descr (DW_OP_pick, 0, 0);
18204 ret->frame_offset_rel = 1;
18205 context->placeholder_seen = true;
18206 break;
18207 }
18208 else
18209 expansion_failed (loc, NULL_RTX,
18210 "PLACEHOLDER_EXPR for an unexpected type");
18211 break;
18212
18213 case CALL_EXPR:
18214 {
18215 const int nargs = call_expr_nargs (loc);
18216 tree callee = get_callee_fndecl (loc);
18217 int i;
18218 dw_die_ref dwarf_proc;
18219
18220 if (callee == NULL_TREE)
18221 goto call_expansion_failed;
18222
18223 /* We handle only functions that return an integer. */
18224 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18225 goto call_expansion_failed;
18226
18227 dwarf_proc = function_to_dwarf_procedure (callee);
18228 if (dwarf_proc == NULL)
18229 goto call_expansion_failed;
18230
18231 /* Evaluate arguments right-to-left so that the first argument will
18232 be the top-most one on the stack. */
18233 for (i = nargs - 1; i >= 0; --i)
18234 {
18235 dw_loc_descr_ref loc_descr
18236 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18237 context);
18238
18239 if (loc_descr == NULL)
18240 goto call_expansion_failed;
18241
18242 add_loc_descr (&ret, loc_descr);
18243 }
18244
18245 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18246 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18247 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18248 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18249 add_loc_descr (&ret, ret1);
18250 break;
18251
18252 call_expansion_failed:
18253 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18254 /* There are no opcodes for these operations. */
18255 return 0;
18256 }
18257
18258 case PREINCREMENT_EXPR:
18259 case PREDECREMENT_EXPR:
18260 case POSTINCREMENT_EXPR:
18261 case POSTDECREMENT_EXPR:
18262 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
18263 /* There are no opcodes for these operations. */
18264 return 0;
18265
18266 case ADDR_EXPR:
18267 /* If we already want an address, see if there is an INDIRECT_REF inside,
18268 e.g. for &this->field.  */
18269 if (want_address)
18270 {
18271 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18272 (loc, want_address == 2, context);
18273 if (list_ret)
18274 have_address = 1;
18275 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18276 && (ret = cst_pool_loc_descr (loc)))
18277 have_address = 1;
18278 }
18279 /* Otherwise, process the argument and look for the address. */
18280 if (!list_ret && !ret)
18281 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18282 else
18283 {
18284 if (want_address)
18285 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18286 return NULL;
18287 }
18288 break;
18289
18290 case VAR_DECL:
18291 if (DECL_THREAD_LOCAL_P (loc))
18292 {
18293 rtx rtl;
18294 enum dwarf_location_atom tls_op;
18295 enum dtprel_bool dtprel = dtprel_false;
18296
18297 if (targetm.have_tls)
18298 {
18299 /* If this is not defined, we have no way to emit the
18300 data. */
18301 if (!targetm.asm_out.output_dwarf_dtprel)
18302 return 0;
18303
18304 /* The way DW_OP_GNU_push_tls_address is specified, we
18305 can only look up addresses of objects in the current
18306 module. We used DW_OP_addr as first op, but that's
18307 wrong, because DW_OP_addr is relocated by the debug
18308 info consumer, while DW_OP_GNU_push_tls_address
18309 operand shouldn't be. */
18310 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18311 return 0;
18312 dtprel = dtprel_true;
18313 /* We check for DWARF 5 here because gdb did not implement
18314 DW_OP_form_tls_address until after 7.12. */
18315 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18316 : DW_OP_GNU_push_tls_address);
18317 }
18318 else
18319 {
18320 if (!targetm.emutls.debug_form_tls_address
18321 || !(dwarf_version >= 3 || !dwarf_strict))
18322 return 0;
18323 /* We stuffed the control variable into the DECL_VALUE_EXPR
18324 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18325 no longer appear in gimple code. We used the control
18326 variable specifically so that we could pick it up here.  */
18327 loc = DECL_VALUE_EXPR (loc);
18328 tls_op = DW_OP_form_tls_address;
18329 }
18330
18331 rtl = rtl_for_decl_location (loc);
18332 if (rtl == NULL_RTX)
18333 return 0;
18334
18335 if (!MEM_P (rtl))
18336 return 0;
18337 rtl = XEXP (rtl, 0);
18338 if (! CONSTANT_P (rtl))
18339 return 0;
18340
18341 ret = new_addr_loc_descr (rtl, dtprel);
18342 ret1 = new_loc_descr (tls_op, 0, 0);
18343 add_loc_descr (&ret, ret1);
18344
18345 have_address = 1;
18346 break;
18347 }
18348 /* FALLTHRU */
18349
18350 case PARM_DECL:
18351 if (context != NULL && context->dpi != NULL
18352 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18353 {
18354 /* We are generating code for a DWARF procedure and we want to access
18355 one of its arguments: find the appropriate argument offset and let
18356 the resolve_args_picking pass compute the offset that complies
18357 with the stack frame size. */
18358 unsigned i = 0;
18359 tree cursor;
18360
18361 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18362 cursor != NULL_TREE && cursor != loc;
18363 cursor = TREE_CHAIN (cursor), ++i)
18364 ;
18365 /* If we are translating a DWARF procedure, all referenced parameters
18366 must belong to the current function. */
18367 gcc_assert (cursor != NULL_TREE);
18368
18369 ret = new_loc_descr (DW_OP_pick, i, 0);
18370 ret->frame_offset_rel = 1;
18371 break;
18372 }
18373 /* FALLTHRU */
18374
18375 case RESULT_DECL:
18376 if (DECL_HAS_VALUE_EXPR_P (loc))
18377 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18378 want_address, context);
18379 /* FALLTHRU */
18380
18381 case FUNCTION_DECL:
18382 {
18383 rtx rtl;
18384 var_loc_list *loc_list = lookup_decl_loc (loc);
18385
18386 if (loc_list && loc_list->first)
18387 {
18388 list_ret = dw_loc_list (loc_list, loc, want_address);
18389 have_address = want_address != 0;
18390 break;
18391 }
18392 rtl = rtl_for_decl_location (loc);
18393 if (rtl == NULL_RTX)
18394 {
18395 if (TREE_CODE (loc) != FUNCTION_DECL
18396 && early_dwarf
18397 && current_function_decl
18398 && want_address != 1
18399 && ! DECL_IGNORED_P (loc)
18400 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18401 || POINTER_TYPE_P (TREE_TYPE (loc)))
18402 && DECL_CONTEXT (loc) == current_function_decl
18403 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18404 <= DWARF2_ADDR_SIZE))
18405 {
18406 dw_die_ref ref = lookup_decl_die (loc);
18407 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18408 if (ref)
18409 {
18410 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18411 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18412 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18413 }
18414 else
18415 {
18416 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18417 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18418 }
18419 break;
18420 }
18421 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18422 return 0;
18423 }
18424 else if (CONST_INT_P (rtl))
18425 {
18426 HOST_WIDE_INT val = INTVAL (rtl);
18427 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18428 val &= GET_MODE_MASK (DECL_MODE (loc));
18429 ret = int_loc_descriptor (val);
18430 }
18431 else if (GET_CODE (rtl) == CONST_STRING)
18432 {
18433 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18434 return 0;
18435 }
18436 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18437 ret = new_addr_loc_descr (rtl, dtprel_false);
18438 else
18439 {
18440 machine_mode mode, mem_mode;
18441
18442 /* Certain constructs can only be represented at top-level. */
18443 if (want_address == 2)
18444 {
18445 ret = loc_descriptor (rtl, VOIDmode,
18446 VAR_INIT_STATUS_INITIALIZED);
18447 have_address = 1;
18448 }
18449 else
18450 {
18451 mode = GET_MODE (rtl);
18452 mem_mode = VOIDmode;
18453 if (MEM_P (rtl))
18454 {
18455 mem_mode = mode;
18456 mode = get_address_mode (rtl);
18457 rtl = XEXP (rtl, 0);
18458 have_address = 1;
18459 }
18460 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18461 VAR_INIT_STATUS_INITIALIZED);
18462 }
18463 if (!ret)
18464 expansion_failed (loc, rtl,
18465 "failed to produce loc descriptor for rtl");
18466 }
18467 }
18468 break;
18469
18470 case MEM_REF:
18471 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18472 {
18473 have_address = 1;
18474 goto do_plus;
18475 }
18476 /* Fallthru. */
18477 case INDIRECT_REF:
18478 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18479 have_address = 1;
18480 break;
18481
18482 case TARGET_MEM_REF:
18483 case SSA_NAME:
18484 case DEBUG_EXPR_DECL:
18485 return NULL;
18486
18487 case COMPOUND_EXPR:
18488 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18489 context);
18490
18491 CASE_CONVERT:
18492 case VIEW_CONVERT_EXPR:
18493 case SAVE_EXPR:
18494 case MODIFY_EXPR:
18495 case NON_LVALUE_EXPR:
18496 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18497 context);
18498
18499 case COMPONENT_REF:
18500 case BIT_FIELD_REF:
18501 case ARRAY_REF:
18502 case ARRAY_RANGE_REF:
18503 case REALPART_EXPR:
18504 case IMAGPART_EXPR:
18505 {
18506 tree obj, offset;
18507 poly_int64 bitsize, bitpos, bytepos;
18508 machine_mode mode;
18509 int unsignedp, reversep, volatilep = 0;
18510
18511 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18512 &unsignedp, &reversep, &volatilep);
18513
18514 gcc_assert (obj != loc);
18515
18516 list_ret = loc_list_from_tree_1 (obj,
18517 want_address == 2
18518 && known_eq (bitpos, 0)
18519 && !offset ? 2 : 1,
18520 context);
18521 /* TODO: We can extract the value of a small expression via shifting even
18522 for nonzero bitpos. */
18523 if (list_ret == 0)
18524 return 0;
18525 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18526 || !multiple_p (bitsize, BITS_PER_UNIT))
18527 {
18528 expansion_failed (loc, NULL_RTX,
18529 "bitfield access");
18530 return 0;
18531 }
18532
18533 if (offset != NULL_TREE)
18534 {
18535 /* Variable offset. */
18536 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18537 if (list_ret1 == 0)
18538 return 0;
18539 add_loc_list (&list_ret, list_ret1);
18540 if (!list_ret)
18541 return 0;
18542 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18543 }
18544
18545 HOST_WIDE_INT value;
18546 if (bytepos.is_constant (&value) && value > 0)
18547 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18548 value, 0));
18549 else if (maybe_ne (bytepos, 0))
18550 loc_list_plus_const (list_ret, bytepos);
18551
18552 have_address = 1;
18553 break;
18554 }
18555
18556 case INTEGER_CST:
18557 if ((want_address || !tree_fits_shwi_p (loc))
18558 && (ret = cst_pool_loc_descr (loc)))
18559 have_address = 1;
18560 else if (want_address == 2
18561 && tree_fits_shwi_p (loc)
18562 && (ret = address_of_int_loc_descriptor
18563 (int_size_in_bytes (TREE_TYPE (loc)),
18564 tree_to_shwi (loc))))
18565 have_address = 1;
18566 else if (tree_fits_shwi_p (loc))
18567 ret = int_loc_descriptor (tree_to_shwi (loc));
18568 else if (tree_fits_uhwi_p (loc))
18569 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18570 else
18571 {
18572 expansion_failed (loc, NULL_RTX,
18573 "Integer operand is not host integer");
18574 return 0;
18575 }
18576 break;
18577
18578 case POLY_INT_CST:
18579 {
18580 if (want_address)
18581 {
18582 expansion_failed (loc, NULL_RTX,
18583 "constant address with a runtime component");
18584 return 0;
18585 }
18586 poly_int64 value;
18587 if (!poly_int_tree_p (loc, &value))
18588 {
18589 expansion_failed (loc, NULL_RTX, "constant too big");
18590 return 0;
18591 }
18592 ret = int_loc_descriptor (value);
18593 }
18594 break;
18595
18596 case CONSTRUCTOR:
18597 case REAL_CST:
18598 case STRING_CST:
18599 case COMPLEX_CST:
18600 if ((ret = cst_pool_loc_descr (loc)))
18601 have_address = 1;
18602 else if (TREE_CODE (loc) == CONSTRUCTOR)
18603 {
18604 tree type = TREE_TYPE (loc);
18605 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18606 unsigned HOST_WIDE_INT offset = 0;
18607 unsigned HOST_WIDE_INT cnt;
18608 constructor_elt *ce;
18609
18610 if (TREE_CODE (type) == RECORD_TYPE)
18611 {
18612 /* This is very limited, but it's enough to output
18613 pointers to member functions, as long as the
18614 referenced function is defined in the current
18615 translation unit. */
18616 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18617 {
18618 tree val = ce->value;
18619
18620 tree field = ce->index;
18621
18622 if (val)
18623 STRIP_NOPS (val);
18624
18625 if (!field || DECL_BIT_FIELD (field))
18626 {
18627 expansion_failed (loc, NULL_RTX,
18628 "bitfield in record type constructor");
18629 size = offset = (unsigned HOST_WIDE_INT)-1;
18630 ret = NULL;
18631 break;
18632 }
18633
18634 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18635 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18636 gcc_assert (pos + fieldsize <= size);
18637 if (pos < offset)
18638 {
18639 expansion_failed (loc, NULL_RTX,
18640 "out-of-order fields in record constructor");
18641 size = offset = (unsigned HOST_WIDE_INT)-1;
18642 ret = NULL;
18643 break;
18644 }
18645 if (pos > offset)
18646 {
18647 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18648 add_loc_descr (&ret, ret1);
18649 offset = pos;
18650 }
18651 if (val && fieldsize != 0)
18652 {
18653 ret1 = loc_descriptor_from_tree (val, want_address, context);
18654 if (!ret1)
18655 {
18656 expansion_failed (loc, NULL_RTX,
18657 "unsupported expression in field");
18658 size = offset = (unsigned HOST_WIDE_INT)-1;
18659 ret = NULL;
18660 break;
18661 }
18662 add_loc_descr (&ret, ret1);
18663 }
18664 if (fieldsize)
18665 {
18666 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18667 add_loc_descr (&ret, ret1);
18668 offset = pos + fieldsize;
18669 }
18670 }
18671
18672 if (offset != size)
18673 {
18674 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18675 add_loc_descr (&ret, ret1);
18676 offset = size;
18677 }
18678
18679 have_address = !!want_address;
18680 }
18681 else
18682 expansion_failed (loc, NULL_RTX,
18683 "constructor of non-record type");
18684 }
18685 else
18686 /* We can construct small constants here using int_loc_descriptor. */
18687 expansion_failed (loc, NULL_RTX,
18688 "constructor or constant not in constant pool");
18689 break;
18690
18691 case TRUTH_AND_EXPR:
18692 case TRUTH_ANDIF_EXPR:
18693 case BIT_AND_EXPR:
18694 op = DW_OP_and;
18695 goto do_binop;
18696
18697 case TRUTH_XOR_EXPR:
18698 case BIT_XOR_EXPR:
18699 op = DW_OP_xor;
18700 goto do_binop;
18701
18702 case TRUTH_OR_EXPR:
18703 case TRUTH_ORIF_EXPR:
18704 case BIT_IOR_EXPR:
18705 op = DW_OP_or;
18706 goto do_binop;
18707
18708 case FLOOR_DIV_EXPR:
18709 case CEIL_DIV_EXPR:
18710 case ROUND_DIV_EXPR:
18711 case TRUNC_DIV_EXPR:
18712 case EXACT_DIV_EXPR:
18713 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18714 return 0;
18715 op = DW_OP_div;
18716 goto do_binop;
18717
18718 case MINUS_EXPR:
18719 op = DW_OP_minus;
18720 goto do_binop;
18721
18722 case FLOOR_MOD_EXPR:
18723 case CEIL_MOD_EXPR:
18724 case ROUND_MOD_EXPR:
18725 case TRUNC_MOD_EXPR:
18726 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18727 {
18728 op = DW_OP_mod;
18729 goto do_binop;
18730 }
18731 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18732 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18733 if (list_ret == 0 || list_ret1 == 0)
18734 return 0;
18735
18736 add_loc_list (&list_ret, list_ret1);
18737 if (list_ret == 0)
18738 return 0;
18739 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18740 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18741 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18742 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18743 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18744 break;
18745
18746 case MULT_EXPR:
18747 op = DW_OP_mul;
18748 goto do_binop;
18749
18750 case LSHIFT_EXPR:
18751 op = DW_OP_shl;
18752 goto do_binop;
18753
18754 case RSHIFT_EXPR:
18755 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18756 goto do_binop;
18757
18758 case POINTER_PLUS_EXPR:
18759 case PLUS_EXPR:
18760 do_plus:
18761 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18762 {
18763 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18764 smarter to encode their opposite. The DW_OP_plus_uconst operation
18765 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18766 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18767 bytes, Y being the size of the operation that pushes the opposite
18768 of the addend. So let's choose the smallest representation. */
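	  /* For instance (made-up value), an addend of -1 would need a 10-byte
	     ULEB128 operand for DW_OP_plus_uconst, whereas pushing its opposite
	     costs just DW_OP_lit1 followed by DW_OP_minus, two bytes in total.  */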
18769 const tree tree_addend = TREE_OPERAND (loc, 1);
18770 offset_int wi_addend;
18771 HOST_WIDE_INT shwi_addend;
18772 dw_loc_descr_ref loc_naddend;
18773
18774 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18775 if (list_ret == 0)
18776 return 0;
18777
18778 /* Try to get the literal to push. It is the opposite of the addend,
18779 so as we rely on wrapping during DWARF evaluation, first decode
18780 the literal as a "DWARF-sized" signed number. */
18781 wi_addend = wi::to_offset (tree_addend);
18782 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18783 shwi_addend = wi_addend.to_shwi ();
18784 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18785 ? int_loc_descriptor (-shwi_addend)
18786 : NULL;
18787
18788 if (loc_naddend != NULL
18789 && ((unsigned) size_of_uleb128 (shwi_addend)
18790 > size_of_loc_descr (loc_naddend)))
18791 {
18792 add_loc_descr_to_each (list_ret, loc_naddend);
18793 add_loc_descr_to_each (list_ret,
18794 new_loc_descr (DW_OP_minus, 0, 0));
18795 }
18796 else
18797 {
18798 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18799 {
18800 loc_naddend = loc_cur;
18801 loc_cur = loc_cur->dw_loc_next;
18802 ggc_free (loc_naddend);
18803 }
18804 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18805 }
18806 break;
18807 }
18808
18809 op = DW_OP_plus;
18810 goto do_binop;
18811
18812 case LE_EXPR:
18813 op = DW_OP_le;
18814 goto do_comp_binop;
18815
18816 case GE_EXPR:
18817 op = DW_OP_ge;
18818 goto do_comp_binop;
18819
18820 case LT_EXPR:
18821 op = DW_OP_lt;
18822 goto do_comp_binop;
18823
18824 case GT_EXPR:
18825 op = DW_OP_gt;
18826 goto do_comp_binop;
18827
18828 do_comp_binop:
18829 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18830 {
18831 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18832 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18833 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18834 TREE_CODE (loc));
18835 break;
18836 }
18837 else
18838 goto do_binop;
18839
18840 case EQ_EXPR:
18841 op = DW_OP_eq;
18842 goto do_binop;
18843
18844 case NE_EXPR:
18845 op = DW_OP_ne;
18846 goto do_binop;
18847
18848 do_binop:
18849 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18850 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18851 if (list_ret == 0 || list_ret1 == 0)
18852 return 0;
18853
18854 add_loc_list (&list_ret, list_ret1);
18855 if (list_ret == 0)
18856 return 0;
18857 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18858 break;
18859
18860 case TRUTH_NOT_EXPR:
18861 case BIT_NOT_EXPR:
18862 op = DW_OP_not;
18863 goto do_unop;
18864
18865 case ABS_EXPR:
18866 op = DW_OP_abs;
18867 goto do_unop;
18868
18869 case NEGATE_EXPR:
18870 op = DW_OP_neg;
18871 goto do_unop;
18872
18873 do_unop:
18874 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18875 if (list_ret == 0)
18876 return 0;
18877
18878 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18879 break;
18880
18881 case MIN_EXPR:
18882 case MAX_EXPR:
18883 {
18884 const enum tree_code code =
18885 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18886
18887 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18888 build2 (code, integer_type_node,
18889 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18890 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18891 }
18892
18893 /* fall through */
18894
18895 case COND_EXPR:
18896 {
18897 dw_loc_descr_ref lhs
18898 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18899 dw_loc_list_ref rhs
18900 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18901 dw_loc_descr_ref bra_node, jump_node, tmp;
18902
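	/* The emitted layout is: <condition>, DW_OP_bra -> <then>, <else>,
	   DW_OP_skip -> end, <then>, DW_OP_nop (end); DW_OP_bra branches when
	   the popped condition value is non-zero.  */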
18903 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18904 if (list_ret == 0 || lhs == 0 || rhs == 0)
18905 return 0;
18906
18907 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18908 add_loc_descr_to_each (list_ret, bra_node);
18909
18910 add_loc_list (&list_ret, rhs);
18911 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18912 add_loc_descr_to_each (list_ret, jump_node);
18913
18914 add_loc_descr_to_each (list_ret, lhs);
18915 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18916 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18917
18918 /* ??? Need a node to point the skip at. Use a nop. */
18919 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18920 add_loc_descr_to_each (list_ret, tmp);
18921 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18922 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18923 }
18924 break;
18925
18926 case FIX_TRUNC_EXPR:
18927 return 0;
18928
18929 default:
18930 /* Leave front-end specific codes as simply unknown. This comes
18931 up, for instance, with the C STMT_EXPR. */
18932 if ((unsigned int) TREE_CODE (loc)
18933 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18934 {
18935 expansion_failed (loc, NULL_RTX,
18936 "language specific tree node");
18937 return 0;
18938 }
18939
18940 /* Otherwise this is a generic code; we should just list all of
18941 these explicitly. We forgot one. */
18942 if (flag_checking)
18943 gcc_unreachable ();
18944
18945 /* In a release build, we want to degrade gracefully: better to
18946 generate incomplete debugging information than to crash. */
18947 return NULL;
18948 }
18949
18950 if (!ret && !list_ret)
18951 return 0;
18952
18953 if (want_address == 2 && !have_address
18954 && (dwarf_version >= 4 || !dwarf_strict))
18955 {
18956 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18957 {
18958 expansion_failed (loc, NULL_RTX,
18959 "DWARF address size mismatch");
18960 return 0;
18961 }
18962 if (ret)
18963 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18964 else
18965 add_loc_descr_to_each (list_ret,
18966 new_loc_descr (DW_OP_stack_value, 0, 0));
18967 have_address = 1;
18968 }
18969 /* Show if we can't fill the request for an address. */
18970 if (want_address && !have_address)
18971 {
18972 expansion_failed (loc, NULL_RTX,
18973 "Want address and only have value");
18974 return 0;
18975 }
18976
18977 gcc_assert (!ret || !list_ret);
18978
18979 /* If we've got an address and don't want one, dereference. */
18980 if (!want_address && have_address)
18981 {
18982 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18983
18984 if (size > DWARF2_ADDR_SIZE || size == -1)
18985 {
18986 expansion_failed (loc, NULL_RTX,
18987 "DWARF address size mismatch");
18988 return 0;
18989 }
18990 else if (size == DWARF2_ADDR_SIZE)
18991 op = DW_OP_deref;
18992 else
18993 op = DW_OP_deref_size;
18994
18995 if (ret)
18996 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18997 else
18998 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18999 }
19000 if (ret)
19001 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
19002
19003 return list_ret;
19004 }
19005
19006 /* Likewise, but strip useless DW_OP_nop operations in the resulting
19007 expressions. */
19008
19009 static dw_loc_list_ref
19010 loc_list_from_tree (tree loc, int want_address,
19011 struct loc_descr_context *context)
19012 {
19013 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
19014
19015 for (dw_loc_list_ref loc_cur = result;
19016 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
19017 loc_descr_without_nops (loc_cur->expr);
19018 return result;
19019 }
19020
19021 /* Same as above, but return only a single location expression.  */
19022 static dw_loc_descr_ref
19023 loc_descriptor_from_tree (tree loc, int want_address,
19024 struct loc_descr_context *context)
19025 {
19026 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
19027 if (!ret)
19028 return NULL;
19029 if (ret->dw_loc_next)
19030 {
19031 expansion_failed (loc, NULL_RTX,
19032 "Location list where only loc descriptor needed");
19033 return NULL;
19034 }
19035 return ret->expr;
19036 }
19037
19038 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
19039 pointer to the declared type for the relevant field variable, or return
19040 `integer_type_node' if the given node turns out to be an
19041 ERROR_MARK node. */
19042
19043 static inline tree
19044 field_type (const_tree decl)
19045 {
19046 tree type;
19047
19048 if (TREE_CODE (decl) == ERROR_MARK)
19049 return integer_type_node;
19050
19051 type = DECL_BIT_FIELD_TYPE (decl);
19052 if (type == NULL_TREE)
19053 type = TREE_TYPE (decl);
19054
19055 return type;
19056 }
19057
19058 /* Given a pointer to a tree node, return the alignment in bits for
19059 it, or else return BITS_PER_WORD if the node actually turns out to
19060 be an ERROR_MARK node. */
19061
19062 static inline unsigned
19063 simple_type_align_in_bits (const_tree type)
19064 {
19065 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
19066 }
19067
19068 static inline unsigned
19069 simple_decl_align_in_bits (const_tree decl)
19070 {
19071 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
19072 }
19073
19074 /* Return the result of rounding T up to ALIGN. */
19075
19076 static inline offset_int
19077 round_up_to_align (const offset_int &t, unsigned int align)
19078 {
19079 return wi::udiv_trunc (t + align - 1, align) * align;
19080 }
19081
19082 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
19083 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
19084 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
19085 if we fail to return the size in one of these two forms. */
19086
19087 static dw_loc_descr_ref
19088 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
19089 {
19090 tree tree_size;
19091 struct loc_descr_context ctx;
19092
19093 /* Return a constant integer in priority, if possible. */
19094 *cst_size = int_size_in_bytes (type);
19095 if (*cst_size != -1)
19096 return NULL;
19097
19098 ctx.context_type = const_cast<tree> (type);
19099 ctx.base_decl = NULL_TREE;
19100 ctx.dpi = NULL;
19101 ctx.placeholder_arg = false;
19102 ctx.placeholder_seen = false;
19103
19104 type = TYPE_MAIN_VARIANT (type);
19105 tree_size = TYPE_SIZE_UNIT (type);
19106 return ((tree_size != NULL_TREE)
19107 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
19108 : NULL);
19109 }
19110
19111 /* Helper structure for RECORD_TYPE processing. */
19112 struct vlr_context
19113 {
19114 /* Root RECORD_TYPE. It is needed to generate data member location
19115 descriptions in variable-length records (VLR), but also to cope with
19116 variants, which are composed of nested structures multiplexed with
19117 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
19118 function processing a FIELD_DECL, it is required to be non-null.  */
19119 tree struct_type;
19120
19121 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
19122 QUAL_UNION_TYPE), this holds an expression that computes the offset for
19123 this variant part as part of the root record (in storage units). For
19124 regular records, it must be NULL_TREE. */
19125 tree variant_part_offset;
19126 };
19127
19128 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
19129 addressed byte of the "containing object" for the given FIELD_DECL. If
19130 possible, return a native constant through CST_OFFSET (in which case NULL is
19131 returned); otherwise return a DWARF expression that computes the offset.
19132
19133 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
19134 that offset is, either because the argument turns out to be a pointer to an
19135 ERROR_MARK node, or because the offset expression is too complex for us.
19136
19137 CTX is required: see the comment for VLR_CONTEXT. */
19138
19139 static dw_loc_descr_ref
19140 field_byte_offset (const_tree decl, struct vlr_context *ctx,
19141 HOST_WIDE_INT *cst_offset)
19142 {
19143 tree tree_result;
19144 dw_loc_list_ref loc_result;
19145
19146 *cst_offset = 0;
19147
19148 if (TREE_CODE (decl) == ERROR_MARK)
19149 return NULL;
19150 else
19151 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
19152
19153 /* We cannot handle variable bit offsets at the moment, so abort if that is
19154 the case.  */
19155 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
19156 return NULL;
19157
19158 /* We used to handle only constant offsets in all cases.  Now, we properly
19159 handle dynamic byte offsets only when the PCC bitfield type doesn't
19160 matter.  */
19161 if (PCC_BITFIELD_TYPE_MATTERS
19162 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
19163 {
19164 offset_int object_offset_in_bits;
19165 offset_int object_offset_in_bytes;
19166 offset_int bitpos_int;
19167 tree type;
19168 tree field_size_tree;
19169 offset_int deepest_bitpos;
19170 offset_int field_size_in_bits;
19171 unsigned int type_align_in_bits;
19172 unsigned int decl_align_in_bits;
19173 offset_int type_size_in_bits;
19174
19175 bitpos_int = wi::to_offset (bit_position (decl));
19176 type = field_type (decl);
19177 type_size_in_bits = offset_int_type_size_in_bits (type);
19178 type_align_in_bits = simple_type_align_in_bits (type);
19179
19180 field_size_tree = DECL_SIZE (decl);
19181
19182 /* The size could be unspecified if there was an error, or for
19183 a flexible array member. */
19184 if (!field_size_tree)
19185 field_size_tree = bitsize_zero_node;
19186
19187 /* If the size of the field is not constant, use the type size. */
19188 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19189 field_size_in_bits = wi::to_offset (field_size_tree);
19190 else
19191 field_size_in_bits = type_size_in_bits;
19192
19193 decl_align_in_bits = simple_decl_align_in_bits (decl);
19194
19195 /* The GCC front-end doesn't make any attempt to keep track of the
19196 starting bit offset (relative to the start of the containing
19197 structure type) of the hypothetical "containing object" for a
19198 bit-field. Thus, when computing the byte offset value for the
19199 start of the "containing object" of a bit-field, we must deduce
19200 this information on our own. This can be rather tricky to do in
19201 some cases. For example, handling the following structure type
19202 definition when compiling for an i386/i486 target (which only
19203 aligns long long's to 32-bit boundaries) can be very tricky:
19204
19205 struct S { int field1; long long field2:31; };
19206
19207 Fortunately, there is a simple rule-of-thumb which can be used
19208 in such cases. When compiling for an i386/i486, GCC will
19209 allocate 8 bytes for the structure shown above. It decides to
19210 do this based upon one simple rule for bit-field allocation.
19211 GCC allocates each "containing object" for each bit-field at
19212 the first (i.e. lowest addressed) legitimate alignment boundary
19213 (based upon the required minimum alignment for the declared
19214 type of the field) which it can possibly use, subject to the
19215 condition that there is still enough available space remaining
19216 in the containing object (when allocated at the selected point)
19217 to fully accommodate all of the bits of the bit-field itself.
19218
19219 This simple rule makes it obvious why GCC allocates 8 bytes for
19220 each object of the structure type shown above. When looking
19221 for a place to allocate the "containing object" for `field2',
19222 the compiler simply tries to allocate a 64-bit "containing
19223 object" at each successive 32-bit boundary (starting at zero)
19224 until it finds a place to allocate that 64-bit field such that
19225 at least 31 contiguous (and previously unallocated) bits remain
19226 within that selected 64-bit field.  (As it turns out, for the
19227 example above, the compiler finds it is OK to allocate the
19228 "containing object" 64-bit field at bit-offset zero within the
19229 structure type.)
19230
19231 Here we attempt to work backwards from the limited set of facts
19232 we're given, and we try to deduce from those facts, where GCC
19233 must have believed that the containing object started (within
19234 the structure type). The value we deduce is then used (by the
19235 callers of this routine) to generate DW_AT_location and
19236 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19237 the case of DW_AT_location, regular fields as well). */
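/* Illustrative sketch (not part of the original code): the deduction
   performed below, redone with plain integers for the `struct S' example
   above, assuming an i386-like target where long long is 32-bit aligned.
   round_up here mirrors round_up_to_align.

     static int round_up (int x, int align)
     { return (x + align - 1) / align * align; }

     int bitpos = 32;       // field2 starts at bit 32
     int field_size = 31;   // declared width of field2
     int type_size = 64;    // bits in long long
     int type_align = 32;   // i386 alignment of long long, in bits

     int deepest = bitpos + field_size;                // 63
     int object_offset
       = round_up (deepest - type_size, type_align);   // round_up (-1, 32) == 0
     // The fallback below (round up to the decl alignment instead) is not
     // taken, since 0 <= bitpos.  So the "containing object" starts at
     // bit 0, i.e. byte offset 0, matching the conclusion above.  */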
19238
19239 /* Figure out the bit-distance from the start of the structure to
19240 the "deepest" bit of the bit-field. */
19241 deepest_bitpos = bitpos_int + field_size_in_bits;
19242
19243 /* This is the tricky part. Use some fancy footwork to deduce
19244 where the lowest addressed bit of the containing object must
19245 be. */
19246 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19247
19248 /* Round up to type_align by default. This works best for
19249 bitfields. */
19250 object_offset_in_bits
19251 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19252
19253 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19254 {
19255 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19256
19257 /* Round up to decl_align instead. */
19258 object_offset_in_bits
19259 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19260 }
19261
19262 object_offset_in_bytes
19263 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19264 if (ctx->variant_part_offset == NULL_TREE)
19265 {
19266 *cst_offset = object_offset_in_bytes.to_shwi ();
19267 return NULL;
19268 }
19269 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19270 }
19271 else
19272 tree_result = byte_position (decl);
19273
19274 if (ctx->variant_part_offset != NULL_TREE)
19275 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19276 ctx->variant_part_offset, tree_result);
19277
19278 /* If the byte offset is a constant, it's simpler to handle a native
19279 constant rather than a DWARF expression. */
19280 if (TREE_CODE (tree_result) == INTEGER_CST)
19281 {
19282 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19283 return NULL;
19284 }
19285 struct loc_descr_context loc_ctx = {
19286 ctx->struct_type, /* context_type */
19287 NULL_TREE, /* base_decl */
19288 NULL, /* dpi */
19289 false, /* placeholder_arg */
19290 false /* placeholder_seen */
19291 };
19292 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19293
19294 /* We want a DWARF expression: give up if we only have a location list
19295 with multiple elements. */
19296 if (!loc_result || !single_element_loc_list_p (loc_result))
19297 return NULL;
19298 else
19299 return loc_result->expr;
19300 }
19301 \f
19302 /* The following routines define various Dwarf attributes and any data
19303 associated with them. */
19304
19305 /* Add a location description attribute value to a DIE.
19306
19307 This emits location attributes suitable for whole variables and
19308 whole parameters. Note that the location attributes for struct fields are
19309 generated by the routine `data_member_location_attribute' below. */
19310
19311 static inline void
19312 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19313 dw_loc_list_ref descr)
19314 {
19315 bool check_no_locviews = true;
19316 if (descr == 0)
19317 return;
19318 if (single_element_loc_list_p (descr))
19319 add_AT_loc (die, attr_kind, descr->expr);
19320 else
19321 {
19322 add_AT_loc_list (die, attr_kind, descr);
19323 gcc_assert (descr->ll_symbol);
19324 if (attr_kind == DW_AT_location && descr->vl_symbol
19325 && dwarf2out_locviews_in_attribute ())
19326 {
19327 add_AT_view_list (die, DW_AT_GNU_locviews);
19328 check_no_locviews = false;
19329 }
19330 }
19331
19332 if (check_no_locviews)
19333 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19334 }
19335
19336 /* Add DW_AT_accessibility attribute to DIE if needed. */
19337
19338 static void
19339 add_accessibility_attribute (dw_die_ref die, tree decl)
19340 {
19341 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19342 children, otherwise the default is DW_ACCESS_public. In DWARF2
19343 the default has always been DW_ACCESS_public. */
19344 if (TREE_PROTECTED (decl))
19345 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19346 else if (TREE_PRIVATE (decl))
19347 {
19348 if (dwarf_version == 2
19349 || die->die_parent == NULL
19350 || die->die_parent->die_tag != DW_TAG_class_type)
19351 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19352 }
19353 else if (dwarf_version > 2
19354 && die->die_parent
19355 && die->die_parent->die_tag == DW_TAG_class_type)
19356 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19357 }
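/* Illustrative sketch (not part of the original code): for

     class C { int priv; protected: int prot; public: int pub; };

   add_accessibility_attribute behaves as follows with -gdwarf-4:
     priv:  no attribute (private is the DWARF 3+ default in a class)
     prot:  DW_AT_accessibility DW_ACCESS_protected
     pub:   DW_AT_accessibility DW_ACCESS_public
   whereas with -gdwarf-2 `priv' gets an explicit DW_ACCESS_private and
   `pub' gets no attribute (public being the DWARF 2 default).  */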
19358
19359 /* Attach the specialized form of location attribute used for data members of
19360 struct and union types. In the special case of a FIELD_DECL node which
19361 represents a bit-field, the "offset" part of this special location
19362 descriptor must indicate the distance in bytes from the lowest-addressed
19363 byte of the containing struct or union type to the lowest-addressed byte of
19364 the "containing object" for the bit-field. (See the `field_byte_offset'
19365 function above).
19366
19367 For any given bit-field, the "containing object" is a hypothetical object
19368 (of some integral or enum type) within which the given bit-field lives. The
19369 type of this hypothetical "containing object" is always the same as the
19370 declared type of the individual bit-field itself (for GCC anyway... the
19371 DWARF spec doesn't actually mandate this). Note that it is the size (in
19372 bytes) of the hypothetical "containing object" which will be given in the
19373 DW_AT_byte_size attribute for this bit-field. (See the
19374 `byte_size_attribute' function below.) It is also used when calculating the
19375 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19376 function below.)
19377
19378 CTX is required: see the comment for VLR_CONTEXT. */
19379
19380 static void
19381 add_data_member_location_attribute (dw_die_ref die,
19382 tree decl,
19383 struct vlr_context *ctx)
19384 {
19385 HOST_WIDE_INT offset;
19386 dw_loc_descr_ref loc_descr = 0;
19387
19388 if (TREE_CODE (decl) == TREE_BINFO)
19389 {
19390 /* We're working on the TAG_inheritance for a base class. */
19391 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19392 {
19393 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19394 aren't at a fixed offset from all (sub)objects of the same
19395 type. We need to extract the appropriate offset from our
19396 vtable. The following dwarf expression means
19397
19398 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19399
19400 This is specific to the V3 ABI, of course. */
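/* Illustrative sketch (not part of the original code): the C equivalent of
   the expression built below, as a debugger would evaluate it under the
   Itanium (V3) C++ ABI; vptr_field_offset stands for the negative value
   read from BINFO_VPTR_FIELD.

     static char *
     virtual_base_addr (char *obj, long vptr_field_offset)
     {
       char *vtable = *(char **) obj;            // DW_OP_dup; DW_OP_deref
       char *slot = vtable + vptr_field_offset;  // push -offset; DW_OP_minus
       long vbase_offset = *(long *) slot;       // DW_OP_deref
       return obj + vbase_offset;                // DW_OP_plus
     }
*/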
19401
19402 dw_loc_descr_ref tmp;
19403
19404 /* Make a copy of the object address. */
19405 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19406 add_loc_descr (&loc_descr, tmp);
19407
19408 /* Extract the vtable address. */
19409 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19410 add_loc_descr (&loc_descr, tmp);
19411
19412 /* Calculate the address of the offset. */
19413 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19414 gcc_assert (offset < 0);
19415
19416 tmp = int_loc_descriptor (-offset);
19417 add_loc_descr (&loc_descr, tmp);
19418 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19419 add_loc_descr (&loc_descr, tmp);
19420
19421 /* Extract the offset. */
19422 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19423 add_loc_descr (&loc_descr, tmp);
19424
19425 /* Add it to the object address. */
19426 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19427 add_loc_descr (&loc_descr, tmp);
19428 }
19429 else
19430 offset = tree_to_shwi (BINFO_OFFSET (decl));
19431 }
19432 else
19433 {
19434 loc_descr = field_byte_offset (decl, ctx, &offset);
19435
19436 /* If loc_descr is available then we know the field offset is dynamic.
19437 However, GDB does not handle dynamic field offsets very well at the
19438 moment. */
19439 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19440 {
19441 loc_descr = NULL;
19442 offset = 0;
19443 }
19444
19445 /* Data member location evaluation starts with the base address on the
19446 stack. Compute the field offset and add it to this base address. */
19447 else if (loc_descr != NULL)
19448 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19449 }
19450
19451 if (! loc_descr)
19452 {
19453 /* While DW_AT_data_bit_offset was already added in DWARF 4, consumer
19454 support arrived late; GDB, for example, only added it in November 2016.
19455 For DWARF 5 we need newer debug info consumers anyway. We might relax
19456 this to dwarf_version >= 4 once most consumers have caught up. */
19457 if (dwarf_version >= 5
19458 && TREE_CODE (decl) == FIELD_DECL
19459 && DECL_BIT_FIELD_TYPE (decl)
19460 && (ctx->variant_part_offset == NULL_TREE
19461 || TREE_CODE (ctx->variant_part_offset) == INTEGER_CST))
19462 {
19463 tree off = bit_position (decl);
19464 if (ctx->variant_part_offset)
19465 off = bit_from_pos (ctx->variant_part_offset, off);
19466 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19467 {
19468 remove_AT (die, DW_AT_byte_size);
19469 remove_AT (die, DW_AT_bit_offset);
19470 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19471 return;
19472 }
19473 }
19474 if (dwarf_version > 2)
19475 {
19476 /* Don't need to output a location expression, just the constant. */
19477 if (offset < 0)
19478 add_AT_int (die, DW_AT_data_member_location, offset);
19479 else
19480 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19481 return;
19482 }
19483 else
19484 {
19485 enum dwarf_location_atom op;
19486
19487 /* The DWARF2 standard says that we should assume that the structure
19488 address is already on the stack, so we can specify a structure
19489 field address by using DW_OP_plus_uconst. */
19490 op = DW_OP_plus_uconst;
19491 loc_descr = new_loc_descr (op, offset, 0);
19492 }
19493 }
19494
19495 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19496 }
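/* Illustrative sketch (not part of the original code), assuming a target
   where int is 4-byte aligned:

     struct ex { char c; int i; };          // `i' lives at byte offset 4

   For `i', add_data_member_location_attribute emits, depending on version:

     DWARF >= 3:  DW_AT_data_member_location  4           (plain constant)
     DWARF 2:     DW_AT_data_member_location  <DW_OP_plus_uconst 4>

   and for a bit-field such as

     struct bf { int a : 3; int b : 5; };   // `b' starts at bit 3

   DWARF 5 output instead carries DW_AT_data_bit_offset 3 (replacing
   DW_AT_byte_size and DW_AT_bit_offset) when a DW_AT_bit_size is present.  */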
19497
19498 /* Writes integer values to dw_vec_const array. */
19499
19500 static void
19501 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19502 {
19503 while (size != 0)
19504 {
19505 *dest++ = val & 0xff;
19506 val >>= 8;
19507 --size;
19508 }
19509 }
19510
19511 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19512
19513 static HOST_WIDE_INT
19514 extract_int (const unsigned char *src, unsigned int size)
19515 {
19516 HOST_WIDE_INT val = 0;
19517
19518 src += size;
19519 while (size != 0)
19520 {
19521 val <<= 8;
19522 val |= *--src & 0xff;
19523 --size;
19524 }
19525 return val;
19526 }
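/* Illustrative sketch (not part of the original code): insert_int stores the
   value least-significant byte first and extract_int reads it back, so the
   two round-trip for any SIZE up to sizeof (HOST_WIDE_INT):

     unsigned char buf[2];
     insert_int (0x1234, 2, buf);             // buf[0] == 0x34, buf[1] == 0x12
     HOST_WIDE_INT v = extract_int (buf, 2);  // v == 0x1234
*/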
19527
19528 /* Writes wide_int values to dw_vec_const array. */
19529
19530 static void
19531 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19532 {
19533 int i;
19534
19535 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19536 {
19537 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19538 return;
19539 }
19540
19541 /* We'd have to extend this code to support odd sizes. */
19542 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19543
19544 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19545
19546 if (WORDS_BIG_ENDIAN)
19547 for (i = n - 1; i >= 0; i--)
19548 {
19549 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19550 dest += sizeof (HOST_WIDE_INT);
19551 }
19552 else
19553 for (i = 0; i < n; i++)
19554 {
19555 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19556 dest += sizeof (HOST_WIDE_INT);
19557 }
19558 }
19559
19560 /* Writes floating point values to dw_vec_const array. */
19561
19562 static void
19563 insert_float (const_rtx rtl, unsigned char *array)
19564 {
19565 long val[4];
19566 int i;
19567 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19568
19569 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19570
19571 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19572 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19573 {
19574 insert_int (val[i], 4, array);
19575 array += 4;
19576 }
19577 }
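/* Illustrative sketch (not part of the original code), assuming a
   little-endian target with IEEE single precision: for the constant 1.0f,
   whose bit pattern is 0x3f800000, real_to_target places that 32-bit piece
   in val[0] and insert_int then emits its bytes least-significant first:

     array[0..3] == { 0x00, 0x00, 0x80, 0x3f }

   which matches the constant's in-memory image on such a target.  */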
19578
19579 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19580 does not have a "location" either in memory or in a register. These
19581 things can arise in GNU C when a constant is passed as an actual parameter
19582 to an inlined function. They can also arise in C++ where declared
19583 constants do not necessarily get memory "homes". */
19584
19585 static bool
19586 add_const_value_attribute (dw_die_ref die, rtx rtl)
19587 {
19588 switch (GET_CODE (rtl))
19589 {
19590 case CONST_INT:
19591 {
19592 HOST_WIDE_INT val = INTVAL (rtl);
19593
19594 if (val < 0)
19595 add_AT_int (die, DW_AT_const_value, val);
19596 else
19597 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19598 }
19599 return true;
19600
19601 case CONST_WIDE_INT:
19602 {
19603 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19604 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19605 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19606 wide_int w = wi::zext (w1, prec);
19607 add_AT_wide (die, DW_AT_const_value, w);
19608 }
19609 return true;
19610
19611 case CONST_DOUBLE:
19612 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19613 floating-point constant. A CONST_DOUBLE is used whenever the
19614 constant requires more than one word in order to be adequately
19615 represented. */
19616 if (TARGET_SUPPORTS_WIDE_INT == 0
19617 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19618 add_AT_double (die, DW_AT_const_value,
19619 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19620 else
19621 {
19622 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19623 unsigned int length = GET_MODE_SIZE (mode);
19624 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19625
19626 insert_float (rtl, array);
19627 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19628 }
19629 return true;
19630
19631 case CONST_VECTOR:
19632 {
19633 unsigned int length;
19634 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19635 return false;
19636
19637 machine_mode mode = GET_MODE (rtl);
19638 /* The combination of a length and byte elt_size doesn't extend
19639 naturally to boolean vectors, where several elements are packed
19640 into the same byte. */
19641 if (GET_MODE_CLASS (mode) == MODE_VECTOR_BOOL)
19642 return false;
19643
19644 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19645 unsigned char *array
19646 = ggc_vec_alloc<unsigned char> (length * elt_size);
19647 unsigned int i;
19648 unsigned char *p;
19649 machine_mode imode = GET_MODE_INNER (mode);
19650
19651 switch (GET_MODE_CLASS (mode))
19652 {
19653 case MODE_VECTOR_INT:
19654 for (i = 0, p = array; i < length; i++, p += elt_size)
19655 {
19656 rtx elt = CONST_VECTOR_ELT (rtl, i);
19657 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19658 }
19659 break;
19660
19661 case MODE_VECTOR_FLOAT:
19662 for (i = 0, p = array; i < length; i++, p += elt_size)
19663 {
19664 rtx elt = CONST_VECTOR_ELT (rtl, i);
19665 insert_float (elt, p);
19666 }
19667 break;
19668
19669 default:
19670 gcc_unreachable ();
19671 }
19672
19673 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19674 }
19675 return true;
19676
19677 case CONST_STRING:
19678 if (dwarf_version >= 4 || !dwarf_strict)
19679 {
19680 dw_loc_descr_ref loc_result;
19681 resolve_one_addr (&rtl);
19682 rtl_addr:
19683 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19684 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19685 add_AT_loc (die, DW_AT_location, loc_result);
19686 vec_safe_push (used_rtx_array, rtl);
19687 return true;
19688 }
19689 return false;
19690
19691 case CONST:
19692 if (CONSTANT_P (XEXP (rtl, 0)))
19693 return add_const_value_attribute (die, XEXP (rtl, 0));
19694 /* FALLTHROUGH */
19695 case SYMBOL_REF:
19696 if (!const_ok_for_output (rtl))
19697 return false;
19698 /* FALLTHROUGH */
19699 case LABEL_REF:
19700 if (dwarf_version >= 4 || !dwarf_strict)
19701 goto rtl_addr;
19702 return false;
19703
19704 case PLUS:
19705 /* In cases where an inlined instance of an inline function is passed
19706 the address of an `auto' variable (which is local to the caller) we
19707 can get a situation where the DECL_RTL of the artificial local
19708 variable (for the inlining) which acts as a stand-in for the
19709 corresponding formal parameter (of the inline function) will look
19710 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19711 exactly a compile-time constant expression, but it isn't the address
19712 of the (artificial) local variable either. Rather, it represents the
19713 *value* which the artificial local variable always has during its
19714 lifetime. We currently have no way to represent such quasi-constant
19715 values in Dwarf, so for now we just punt and generate nothing. */
19716 return false;
19717
19718 case HIGH:
19719 case CONST_FIXED:
19720 case MINUS:
19721 case SIGN_EXTEND:
19722 case ZERO_EXTEND:
19723 case CONST_POLY_INT:
19724 return false;
19725
19726 case MEM:
19727 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19728 && MEM_READONLY_P (rtl)
19729 && GET_MODE (rtl) == BLKmode)
19730 {
19731 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19732 return true;
19733 }
19734 return false;
19735
19736 default:
19737 /* No other kinds of rtx should be possible here. */
19738 gcc_unreachable ();
19739 }
19740 return false;
19741 }
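/* Illustrative sketch (not part of the original code): a few rtx shapes
   (shown schematically) and what add_const_value_attribute attaches for
   them, assuming -gdwarf-4 and a non-strict consumer:

     (const_int -5)                    ->  DW_AT_const_value  -5  (signed form)
     (const_int 7)                     ->  DW_AT_const_value   7  (unsigned form)
     (mem/u:BLK (const_string "hi"))   ->  DW_AT_const_value  "hi"
     (symbol_ref "x")                  ->  DW_AT_location
                                             <DW_OP_addr x; DW_OP_stack_value>
*/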
19742
19743 /* Determine whether the evaluation of EXPR references any variables
19744 or functions which aren't otherwise used (and therefore may not be
19745 output). */
19746 static tree
19747 reference_to_unused (tree * tp, int * walk_subtrees,
19748 void * data ATTRIBUTE_UNUSED)
19749 {
19750 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19751 *walk_subtrees = 0;
19752
19753 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19754 && ! TREE_ASM_WRITTEN (*tp))
19755 return *tp;
19756 /* ??? The C++ FE emits debug information for using decls, so
19757 putting gcc_unreachable here falls over. See PR31899. For now
19758 be conservative. */
19759 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19760 return *tp;
19761 else if (VAR_P (*tp))
19762 {
19763 varpool_node *node = varpool_node::get (*tp);
19764 if (!node || !node->definition)
19765 return *tp;
19766 }
19767 else if (TREE_CODE (*tp) == FUNCTION_DECL
19768 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19769 {
19770 /* The call graph machinery must have finished analyzing,
19771 optimizing and gimplifying the CU by now.
19772 So if *TP has no call graph node associated
19773 to it, it means *TP will not be emitted. */
19774 if (!cgraph_node::get (*tp))
19775 return *tp;
19776 }
19777 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19778 return *tp;
19779
19780 return NULL_TREE;
19781 }
19782
19783 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19784 for use in a later add_const_value_attribute call. */
19785
19786 static rtx
19787 rtl_for_decl_init (tree init, tree type)
19788 {
19789 rtx rtl = NULL_RTX;
19790
19791 STRIP_NOPS (init);
19792
19793 /* If a variable is initialized with a string constant without embedded
19794 zeros, build CONST_STRING. */
19795 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19796 {
19797 tree enttype = TREE_TYPE (type);
19798 tree domain = TYPE_DOMAIN (type);
19799 scalar_int_mode mode;
19800
19801 if (is_int_mode (TYPE_MODE (enttype), &mode)
19802 && GET_MODE_SIZE (mode) == 1
19803 && domain
19804 && TYPE_MAX_VALUE (domain)
19805 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19806 && integer_zerop (TYPE_MIN_VALUE (domain))
19807 && compare_tree_int (TYPE_MAX_VALUE (domain),
19808 TREE_STRING_LENGTH (init) - 1) == 0
19809 && ((size_t) TREE_STRING_LENGTH (init)
19810 == strlen (TREE_STRING_POINTER (init)) + 1))
19811 {
19812 rtl = gen_rtx_CONST_STRING (VOIDmode,
19813 ggc_strdup (TREE_STRING_POINTER (init)));
19814 rtl = gen_rtx_MEM (BLKmode, rtl);
19815 MEM_READONLY_P (rtl) = 1;
19816 }
19817 }
19818 /* Other aggregates, and complex values, could be represented using
19819 CONCAT: FIXME! */
19820 else if (AGGREGATE_TYPE_P (type)
19821 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19822 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19823 || TREE_CODE (type) == COMPLEX_TYPE)
19824 ;
19825 /* Vectors only work if their mode is supported by the target.
19826 FIXME: generic vectors ought to work too. */
19827 else if (TREE_CODE (type) == VECTOR_TYPE
19828 && !VECTOR_MODE_P (TYPE_MODE (type)))
19829 ;
19830 /* If the initializer is something that we know will expand into an
19831 immediate RTL constant, expand it now. We must be careful not to
19832 reference variables which won't be output. */
19833 else if (initializer_constant_valid_p (init, type)
19834 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19835 {
19836 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19837 possible. */
19838 if (TREE_CODE (type) == VECTOR_TYPE)
19839 switch (TREE_CODE (init))
19840 {
19841 case VECTOR_CST:
19842 break;
19843 case CONSTRUCTOR:
19844 if (TREE_CONSTANT (init))
19845 {
19846 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19847 bool constant_p = true;
19848 tree value;
19849 unsigned HOST_WIDE_INT ix;
19850
19851 /* Even when ctor is constant, it might contain non-*_CST
19852 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19853 belong into VECTOR_CST nodes. */
19854 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19855 if (!CONSTANT_CLASS_P (value))
19856 {
19857 constant_p = false;
19858 break;
19859 }
19860
19861 if (constant_p)
19862 {
19863 init = build_vector_from_ctor (type, elts);
19864 break;
19865 }
19866 }
19867 /* FALLTHRU */
19868
19869 default:
19870 return NULL;
19871 }
19872
19873 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19874
19875 /* If expand_expr returns a MEM, it wasn't immediate. */
19876 gcc_assert (!rtl || !MEM_P (rtl));
19877 }
19878
19879 return rtl;
19880 }
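/* Illustrative sketch (not part of the original code): the CONST_STRING path
   in rtl_for_decl_init fires only when the array type exactly fits the
   NUL-terminated string and the string has no embedded zero bytes:

     static const char greeting[6] = "hello";  // domain [0..5]: CONST_STRING
     static const char padded[8]   = "hello";  // domain [0..7] != [0..5]: no

   In the first case the result is a read-only BLKmode MEM wrapping a
   CONST_STRING, which add_const_value_attribute later turns into a
   DW_AT_const_value string.  */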
19881
19882 /* Generate RTL for the variable DECL to represent its location. */
19883
19884 static rtx
19885 rtl_for_decl_location (tree decl)
19886 {
19887 rtx rtl;
19888
19889 /* Here we have to decide where we are going to say the parameter "lives"
19890 (as far as the debugger is concerned). We only have a couple of
19891 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19892
19893 DECL_RTL normally indicates where the parameter lives during most of the
19894 activation of the function. If optimization is enabled however, this
19895 could be either NULL or else a pseudo-reg. Both of those cases indicate
19896 that the parameter doesn't really live anywhere (as far as the code
19897 generation parts of GCC are concerned) during most of the function's
19898 activation. That will happen (for example) if the parameter is never
19899 referenced within the function.
19900
19901 We could just generate a location descriptor here for all non-NULL
19902 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19903 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19904 where DECL_RTL is NULL or is a pseudo-reg.
19905
19906 Note however that we can only get away with using DECL_INCOMING_RTL as
19907 a backup substitute for DECL_RTL in certain limited cases. In cases
19908 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19909 we can be sure that the parameter was passed using the same type as it is
19910 declared to have within the function, and that its DECL_INCOMING_RTL
19911 points us to a place where a value of that type is passed.
19912
19913 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19914 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19915 because in these cases DECL_INCOMING_RTL points us to a value of some
19916 type which is *different* from the type of the parameter itself. Thus,
19917 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19918 such cases, the debugger would end up (for example) trying to fetch a
19919 `float' from a place which actually contains the first part of a
19920 `double'. That would lead to really incorrect and confusing
19921 output at debug-time.
19922
19923 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19924 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19925 are a couple of exceptions however. On little-endian machines we can
19926 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19927 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19928 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19929 when (on a little-endian machine) a non-prototyped function has a
19930 parameter declared to be of type `short' or `char'. In such cases,
19931 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19932 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19933 passed `int' value. If the debugger then uses that address to fetch
19934 a `short' or a `char' (on a little-endian machine) the result will be
19935 the correct data, so we allow for such exceptional cases below.
19936
19937 Note that our goal here is to describe the place where the given formal
19938 parameter lives during most of the function's activation (i.e. between the
19939 end of the prologue and the start of the epilogue). We'll do that as best
19940 as we can. Note however that if the given formal parameter is modified
19941 sometime during the execution of the function, then a stack backtrace (at
19942 debug-time) will show the function as having been called with the *new*
19943 value rather than the value which was originally passed in. This happens
19944 rarely enough that it is not a major problem, but it *is* a problem, and
19945 I'd like to fix it.
19946
19947 A future version of dwarf2out.c may generate two additional attributes for
19948 any given DW_TAG_formal_parameter DIE which will describe the "passed
19949 type" and the "passed location" for the given formal parameter in addition
19950 to the attributes we now generate to indicate the "declared type" and the
19951 "active location" for each parameter. This additional set of attributes
19952 could be used by debuggers for stack backtraces. Separately, note that
19953 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19954 This happens (for example) for inlined-instances of inline function formal
19955 parameters which are never referenced. This really shouldn't be
19956 happening. All PARM_DECL nodes should get valid non-NULL
19957 DECL_INCOMING_RTL values. FIXME. */
19958
19959 /* Use DECL_RTL as the "location" unless we find something better. */
19960 rtl = DECL_RTL_IF_SET (decl);
19961
19962 /* When generating abstract instances, ignore everything except
19963 constants, symbols living in memory, and symbols living in
19964 fixed registers. */
19965 if (! reload_completed)
19966 {
19967 if (rtl
19968 && (CONSTANT_P (rtl)
19969 || (MEM_P (rtl)
19970 && CONSTANT_P (XEXP (rtl, 0)))
19971 || (REG_P (rtl)
19972 && VAR_P (decl)
19973 && TREE_STATIC (decl))))
19974 {
19975 rtl = targetm.delegitimize_address (rtl);
19976 return rtl;
19977 }
19978 rtl = NULL_RTX;
19979 }
19980 else if (TREE_CODE (decl) == PARM_DECL)
19981 {
19982 if (rtl == NULL_RTX
19983 || is_pseudo_reg (rtl)
19984 || (MEM_P (rtl)
19985 && is_pseudo_reg (XEXP (rtl, 0))
19986 && DECL_INCOMING_RTL (decl)
19987 && MEM_P (DECL_INCOMING_RTL (decl))
19988 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19989 {
19990 tree declared_type = TREE_TYPE (decl);
19991 tree passed_type = DECL_ARG_TYPE (decl);
19992 machine_mode dmode = TYPE_MODE (declared_type);
19993 machine_mode pmode = TYPE_MODE (passed_type);
19994
19995 /* This decl represents a formal parameter which was optimized out.
19996 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19997 all cases where (rtl == NULL_RTX) just below. */
19998 if (dmode == pmode)
19999 rtl = DECL_INCOMING_RTL (decl);
20000 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
20001 && SCALAR_INT_MODE_P (dmode)
20002 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
20003 && DECL_INCOMING_RTL (decl))
20004 {
20005 rtx inc = DECL_INCOMING_RTL (decl);
20006 if (REG_P (inc))
20007 rtl = inc;
20008 else if (MEM_P (inc))
20009 {
20010 if (BYTES_BIG_ENDIAN)
20011 rtl = adjust_address_nv (inc, dmode,
20012 GET_MODE_SIZE (pmode)
20013 - GET_MODE_SIZE (dmode));
20014 else
20015 rtl = inc;
20016 }
20017 }
20018 }
20019
20020 /* If the parm was passed in registers, but lives on the stack, then
20021 make a big endian correction if the mode of the type of the
20022 parameter is not the same as the mode of the rtl. */
20023 /* ??? This is the same series of checks that are made in dbxout.c before
20024 we reach the big endian correction code there. It isn't clear if all
20025 of these checks are necessary here, but keeping them all is the safe
20026 thing to do. */
20027 else if (MEM_P (rtl)
20028 && XEXP (rtl, 0) != const0_rtx
20029 && ! CONSTANT_P (XEXP (rtl, 0))
20030 /* Not passed in memory. */
20031 && !MEM_P (DECL_INCOMING_RTL (decl))
20032 /* Not passed by invisible reference. */
20033 && (!REG_P (XEXP (rtl, 0))
20034 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
20035 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
20036 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
20037 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
20038 #endif
20039 )
20040 /* Big endian correction check. */
20041 && BYTES_BIG_ENDIAN
20042 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
20043 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
20044 UNITS_PER_WORD))
20045 {
20046 machine_mode addr_mode = get_address_mode (rtl);
20047 poly_int64 offset = (UNITS_PER_WORD
20048 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
20049
20050 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
20051 plus_constant (addr_mode, XEXP (rtl, 0), offset));
20052 }
20053 }
20054 else if (VAR_P (decl)
20055 && rtl
20056 && MEM_P (rtl)
20057 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
20058 {
20059 machine_mode addr_mode = get_address_mode (rtl);
20060 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
20061 GET_MODE (rtl));
20062
20063 /* If a variable is declared "register" yet is smaller than
20064 a register, then if we store the variable to memory, it
20065 looks like we're storing a register-sized value, when in
20066 fact we are not. We need to adjust the offset of the
20067 storage location to reflect the actual value's bytes,
20068 else gdb will not be able to display it. */
20069 if (maybe_ne (offset, 0))
20070 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
20071 plus_constant (addr_mode, XEXP (rtl, 0), offset));
20072 }
20073
20074 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
20075 and will have been substituted directly into all expressions that use it.
20076 C does not have such a concept, but C++ and other languages do. */
20077 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
20078 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
20079
20080 if (rtl)
20081 rtl = targetm.delegitimize_address (rtl);
20082
20083 /* If we don't look past the constant pool, we risk emitting a
20084 reference to a constant pool entry that isn't referenced from
20085 code, and thus is not emitted. */
20086 if (rtl)
20087 rtl = avoid_constant_pool_reference (rtl);
20088
20089 /* Try harder to get a rtl. If this symbol ends up not being emitted
20090 in the current CU, resolve_addr will remove the expression referencing
20091 it. */
20092 if (rtl == NULL_RTX
20093 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
20094 && VAR_P (decl)
20095 && !DECL_EXTERNAL (decl)
20096 && TREE_STATIC (decl)
20097 && DECL_NAME (decl)
20098 && !DECL_HARD_REGISTER (decl)
20099 && DECL_MODE (decl) != VOIDmode)
20100 {
20101 rtl = make_decl_rtl_for_debug (decl);
20102 if (!MEM_P (rtl)
20103 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
20104 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
20105 rtl = NULL_RTX;
20106 }
20107
20108 return rtl;
20109 }
20110
20111 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
20112 returned. If so, the decl for the COMMON block is returned, and the
20113 value is the offset into the common block for the symbol. */
20114
20115 static tree
20116 fortran_common (tree decl, HOST_WIDE_INT *value)
20117 {
20118 tree val_expr, cvar;
20119 machine_mode mode;
20120 poly_int64 bitsize, bitpos;
20121 tree offset;
20122 HOST_WIDE_INT cbitpos;
20123 int unsignedp, reversep, volatilep = 0;
20124
20125 /* If the decl isn't a VAR_DECL, or if it isn't static, or if it
20126 does not have a value expression (giving the offset into the common
20127 area), or if we aren't compiling Fortran, then it isn't a COMMON
20128 symbol and shouldn't be handled as such. */
20129 if (!VAR_P (decl)
20130 || !TREE_STATIC (decl)
20131 || !DECL_HAS_VALUE_EXPR_P (decl)
20132 || !is_fortran ())
20133 return NULL_TREE;
20134
20135 val_expr = DECL_VALUE_EXPR (decl);
20136 if (TREE_CODE (val_expr) != COMPONENT_REF)
20137 return NULL_TREE;
20138
20139 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
20140 &unsignedp, &reversep, &volatilep);
20141
20142 if (cvar == NULL_TREE
20143 || !VAR_P (cvar)
20144 || DECL_ARTIFICIAL (cvar)
20145 || !TREE_PUBLIC (cvar)
20146 /* We don't expect to have to cope with variable offsets,
20147 since at present all static data must have a constant size. */
20148 || !bitpos.is_constant (&cbitpos))
20149 return NULL_TREE;
20150
20151 *value = 0;
20152 if (offset != NULL)
20153 {
20154 if (!tree_fits_shwi_p (offset))
20155 return NULL_TREE;
20156 *value = tree_to_shwi (offset);
20157 }
20158 if (cbitpos != 0)
20159 *value += cbitpos / BITS_PER_UNIT;
20160
20161 return cvar;
20162 }
20163
20164 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
20165 data attribute for a variable or a parameter. We generate the
20166 DW_AT_const_value attribute only in those cases where the given variable
20167 or parameter does not have a true "location" either in memory or in a
20168 register. This can happen (for example) when a constant is passed as an
20169 actual argument in a call to an inline function. (It's possible that
20170 these things can crop up in other ways also.) Note that one type of
20171 constant value which can be passed into an inlined function is a constant
20172 pointer. This can happen for example if an actual argument in an inlined
20173 function call evaluates to a compile-time constant address.
20174
20175 CACHE_P is true if it is worth caching the location list for DECL,
20176 so that future calls can reuse it rather than regenerate it from scratch.
20177 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
20178 since we will need to refer to them each time the function is inlined. */
20179
20180 static bool
20181 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
20182 {
20183 rtx rtl;
20184 dw_loc_list_ref list;
20185 var_loc_list *loc_list;
20186 cached_dw_loc_list *cache;
20187
20188 if (early_dwarf)
20189 return false;
20190
20191 if (TREE_CODE (decl) == ERROR_MARK)
20192 return false;
20193
20194 if (get_AT (die, DW_AT_location)
20195 || get_AT (die, DW_AT_const_value))
20196 return true;
20197
20198 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
20199 || TREE_CODE (decl) == RESULT_DECL);
20200
20201 /* Try to get some constant RTL for this decl, and use that as the value of
20202 the location. */
20203
20204 rtl = rtl_for_decl_location (decl);
20205 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20206 && add_const_value_attribute (die, rtl))
20207 return true;
20208
20209 /* See if we have a single-element location list that is equivalent to
20210 a constant value. In that case it is better to use add_const_value_attribute
20211 rather than expanding the constant value equivalent. */
20212 loc_list = lookup_decl_loc (decl);
20213 if (loc_list
20214 && loc_list->first
20215 && loc_list->first->next == NULL
20216 && NOTE_P (loc_list->first->loc)
20217 && NOTE_VAR_LOCATION (loc_list->first->loc)
20218 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20219 {
20220 struct var_loc_node *node;
20221
20222 node = loc_list->first;
20223 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20224 if (GET_CODE (rtl) == EXPR_LIST)
20225 rtl = XEXP (rtl, 0);
20226 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20227 && add_const_value_attribute (die, rtl))
20228 return true;
20229 }
20230 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20231 list several times. See if we've already cached the contents. */
20232 list = NULL;
20233 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20234 cache_p = false;
20235 if (cache_p)
20236 {
20237 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20238 if (cache)
20239 list = cache->loc_list;
20240 }
20241 if (list == NULL)
20242 {
20243 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20244 NULL);
20245 /* It is usually worth caching this result if the decl is from
20246 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20247 if (cache_p && list && list->dw_loc_next)
20248 {
20249 cached_dw_loc_list **slot
20250 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20251 DECL_UID (decl),
20252 INSERT);
20253 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20254 cache->decl_id = DECL_UID (decl);
20255 cache->loc_list = list;
20256 *slot = cache;
20257 }
20258 }
20259 if (list)
20260 {
20261 add_AT_location_description (die, DW_AT_location, list);
20262 return true;
20263 }
20264 /* None of that worked, so it must not really have a location;
20265 try adding a constant value attribute from the DECL_INITIAL. */
20266 return tree_add_const_value_attribute_for_decl (die, decl);
20267 }
20268
20269 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20270 attribute is the const value T. */
20271
20272 static bool
20273 tree_add_const_value_attribute (dw_die_ref die, tree t)
20274 {
20275 tree init;
20276 tree type = TREE_TYPE (t);
20277 rtx rtl;
20278
20279 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20280 return false;
20281
20282 init = t;
20283 gcc_assert (!DECL_P (init));
20284
20285 if (TREE_CODE (init) == INTEGER_CST)
20286 {
20287 if (tree_fits_uhwi_p (init))
20288 {
20289 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20290 return true;
20291 }
20292 if (tree_fits_shwi_p (init))
20293 {
20294 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20295 return true;
20296 }
20297 }
20298 if (! early_dwarf)
20299 {
20300 rtl = rtl_for_decl_init (init, type);
20301 if (rtl)
20302 return add_const_value_attribute (die, rtl);
20303 }
20304 /* If the host and target are sane, try harder. */
20305 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20306 && initializer_constant_valid_p (init, type))
20307 {
20308 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20309 if (size > 0 && (int) size == size)
20310 {
20311 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20312
20313 if (native_encode_initializer (init, array, size) == size)
20314 {
20315 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20316 return true;
20317 }
20318 ggc_free (array);
20319 }
20320 }
20321 return false;
20322 }
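/* Illustrative sketch (not part of the original code): file-scope constants
   and the attribute tree_add_const_value_attribute produces for their
   DECL_INITIAL:

     const int answer = 42;          // INTEGER_CST:  DW_AT_const_value 42
     const int debt = -7;            // fits shwi:    DW_AT_const_value -7
     struct p { char a, b; };
     const struct p pt = { 1, 2 };   // native_encode_initializer path:
                                     // DW_AT_const_value is the two-byte
                                     // block { 0x01, 0x02 }

   The aggregate case goes through add_AT_vec and assumes the usual 8-bit
   host and target bytes.  */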
20323
20324 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20325 attribute is the const value of T, where T is an integral constant
20326 variable with static storage duration
20327 (so it can't be a PARM_DECL or a RESULT_DECL). */
20328
20329 static bool
20330 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20331 {
20332
20333 if (!decl
20334 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20335 || (VAR_P (decl) && !TREE_STATIC (decl)))
20336 return false;
20337
20338 if (TREE_READONLY (decl)
20339 && ! TREE_THIS_VOLATILE (decl)
20340 && DECL_INITIAL (decl))
20341 /* OK */;
20342 else
20343 return false;
20344
20345 /* Don't add DW_AT_const_value if abstract origin already has one. */
20346 if (get_AT (var_die, DW_AT_const_value))
20347 return false;
20348
20349 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20350 }
20351
20352 /* Convert the CFI instructions for the current function into a
20353 location list. This is used for DW_AT_frame_base when we are targeting
20354 a dwarf2 consumer that does not support the dwarf3
20355 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20356 expressions. */
20357
20358 static dw_loc_list_ref
20359 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20360 {
20361 int ix;
20362 dw_fde_ref fde;
20363 dw_loc_list_ref list, *list_tail;
20364 dw_cfi_ref cfi;
20365 dw_cfa_location last_cfa, next_cfa;
20366 const char *start_label, *last_label, *section;
20367 dw_cfa_location remember;
20368
20369 fde = cfun->fde;
20370 gcc_assert (fde != NULL);
20371
20372 section = secname_for_decl (current_function_decl);
20373 list_tail = &list;
20374 list = NULL;
20375
20376 memset (&next_cfa, 0, sizeof (next_cfa));
20377 next_cfa.reg = INVALID_REGNUM;
20378 remember = next_cfa;
20379
20380 start_label = fde->dw_fde_begin;
20381
20382 /* ??? Bald assumption that the CIE opcode list does not contain
20383 advance opcodes. */
20384 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20385 lookup_cfa_1 (cfi, &next_cfa, &remember);
20386
20387 last_cfa = next_cfa;
20388 last_label = start_label;
20389
20390 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20391 {
20392 /* If the first partition contained no CFI adjustments, the
20393 CIE opcodes apply to the whole first partition. */
20394 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20395 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20396 list_tail = &(*list_tail)->dw_loc_next;
20397 start_label = last_label = fde->dw_fde_second_begin;
20398 }
20399
20400 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20401 {
20402 switch (cfi->dw_cfi_opc)
20403 {
20404 case DW_CFA_set_loc:
20405 case DW_CFA_advance_loc1:
20406 case DW_CFA_advance_loc2:
20407 case DW_CFA_advance_loc4:
20408 if (!cfa_equal_p (&last_cfa, &next_cfa))
20409 {
20410 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20411 start_label, 0, last_label, 0, section);
20412
20413 list_tail = &(*list_tail)->dw_loc_next;
20414 last_cfa = next_cfa;
20415 start_label = last_label;
20416 }
20417 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20418 break;
20419
20420 case DW_CFA_advance_loc:
20421 /* The encoding is complex enough that we should never emit this. */
20422 gcc_unreachable ();
20423
20424 default:
20425 lookup_cfa_1 (cfi, &next_cfa, &remember);
20426 break;
20427 }
20428 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20429 {
20430 if (!cfa_equal_p (&last_cfa, &next_cfa))
20431 {
20432 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20433 start_label, 0, last_label, 0, section);
20434
20435 list_tail = &(*list_tail)->dw_loc_next;
20436 last_cfa = next_cfa;
20437 start_label = last_label;
20438 }
20439 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20440 start_label, 0, fde->dw_fde_end, 0, section);
20441 list_tail = &(*list_tail)->dw_loc_next;
20442 start_label = last_label = fde->dw_fde_second_begin;
20443 }
20444 }
20445
20446 if (!cfa_equal_p (&last_cfa, &next_cfa))
20447 {
20448 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20449 start_label, 0, last_label, 0, section);
20450 list_tail = &(*list_tail)->dw_loc_next;
20451 start_label = last_label;
20452 }
20453
20454 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20455 start_label, 0,
20456 fde->dw_fde_second_begin
20457 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20458 section);
20459
20460 maybe_gen_llsym (list);
20461
20462 return list;
20463 }
20464
20465 /* Compute a displacement from the "steady-state frame pointer" to the
20466 frame base (often the same as the CFA), and store it in
20467 frame_pointer_fb_offset. OFFSET is added to the displacement
20468 before the latter is negated. */
20469
20470 static void
20471 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20472 {
20473 rtx reg, elim;
20474
20475 #ifdef FRAME_POINTER_CFA_OFFSET
20476 reg = frame_pointer_rtx;
20477 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20478 #else
20479 reg = arg_pointer_rtx;
20480 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20481 #endif
20482
20483 elim = (ira_use_lra_p
20484 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20485 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20486 elim = strip_offset_and_add (elim, &offset);
20487
20488 frame_pointer_fb_offset = -offset;
20489
20490 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20491 in which to eliminate. This is because its stack pointer isn't
20492 directly accessible as a register within the ISA. To work around
20493 this, assume that while we cannot provide a proper value for
20494 frame_pointer_fb_offset, we won't need one either. We can use the
20495 hard frame pointer in debug info even if the frame pointer isn't used,
20496 since the hard frame pointer in debug info is encoded with DW_OP_fbreg,
20497 which refers to the DW_AT_frame_base attribute rather than to the hard
20498 frame pointer directly. */
20499 frame_pointer_fb_offset_valid
20500 = (elim == hard_frame_pointer_rtx || elim == stack_pointer_rtx);
20501 }
20502
20503 /* Generate a DW_AT_name attribute given some string value to be included as
20504 the value of the attribute. */
20505
20506 static void
20507 add_name_attribute (dw_die_ref die, const char *name_string)
20508 {
20509 if (name_string != NULL && *name_string != 0)
20510 {
20511 if (demangle_name_func)
20512 name_string = (*demangle_name_func) (name_string);
20513
20514 add_AT_string (die, DW_AT_name, name_string);
20515 }
20516 }
20517
20518 /* Generate a DW_AT_description attribute given some string value to be included
20519 as the value of the attribute. */
20520
20521 static void
20522 add_desc_attribute (dw_die_ref die, const char *name_string)
20523 {
20524 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20525 return;
20526
20527 if (name_string == NULL || *name_string == 0)
20528 return;
20529
20530 if (demangle_name_func)
20531 name_string = (*demangle_name_func) (name_string);
20532
20533 add_AT_string (die, DW_AT_description, name_string);
20534 }
20535
20536 /* Generate a DW_AT_description attribute given some decl to be included
20537 as the value of the attribute. */
20538
20539 static void
20540 add_desc_attribute (dw_die_ref die, tree decl)
20541 {
20542 tree decl_name;
20543
20544 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20545 return;
20546
20547 if (decl == NULL_TREE || !DECL_P (decl))
20548 return;
20549 decl_name = DECL_NAME (decl);
20550
20551 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
20552 {
20553 const char *name = dwarf2_name (decl, 0);
20554 add_desc_attribute (die, name ? name : IDENTIFIER_POINTER (decl_name));
20555 }
20556 else
20557 {
20558 char *desc = print_generic_expr_to_str (decl);
20559 add_desc_attribute (die, desc);
20560 free (desc);
20561 }
20562 }
20563
20564 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
20565 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20566 of TYPE accordingly.
20567
20568 ??? This is a temporary measure until after we're able to generate
20569 regular DWARF for the complex Ada type system. */
20570
20571 static void
20572 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20573 dw_die_ref context_die)
20574 {
20575 tree dtype;
20576 dw_die_ref dtype_die;
20577
20578 if (!lang_hooks.types.descriptive_type)
20579 return;
20580
20581 dtype = lang_hooks.types.descriptive_type (type);
20582 if (!dtype)
20583 return;
20584
20585 dtype_die = lookup_type_die (dtype);
20586 if (!dtype_die)
20587 {
20588 gen_type_die (dtype, context_die);
20589 dtype_die = lookup_type_die (dtype);
20590 gcc_assert (dtype_die);
20591 }
20592
20593 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20594 }
20595
20596 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20597
20598 static const char *
20599 comp_dir_string (void)
20600 {
20601 const char *wd;
20602 char *wd_plus_sep = NULL;
20603 static const char *cached_wd = NULL;
20604
20605 if (cached_wd != NULL)
20606 return cached_wd;
20607
20608 wd = get_src_pwd ();
20609 if (wd == NULL)
20610 return NULL;
20611
20612 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20613 {
20614 size_t wdlen = strlen (wd);
20615 wd_plus_sep = XNEWVEC (char, wdlen + 2);
20616 strcpy (wd_plus_sep, wd);
20617 wd_plus_sep [wdlen] = DIR_SEPARATOR;
20618 wd_plus_sep [wdlen + 1] = 0;
20619 wd = wd_plus_sep;
20620 }
20621
20622 cached_wd = remap_debug_filename (wd);
20623
20624 /* remap_debug_filename can just pass through wd or return a new gc string.
20625 These two types can't both be stored in a GTY(())-tagged string, but since
20626 the cached value lives forever, just copy it if needed. */
20627 if (cached_wd != wd)
20628 {
20629 cached_wd = xstrdup (cached_wd);
20630 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR && wd_plus_sep != NULL)
20631 free (wd_plus_sep);
20632 }
20633
20634 return cached_wd;
20635 }
20636
20637 /* Generate a DW_AT_comp_dir attribute for DIE. */
20638
20639 static void
20640 add_comp_dir_attribute (dw_die_ref die)
20641 {
20642 const char * wd = comp_dir_string ();
20643 if (wd != NULL)
20644 add_AT_string (die, DW_AT_comp_dir, wd);
20645 }
20646
20647 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20648 pointer computation, ...), output a representation for that bound according
20649 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20650 loc_list_from_tree for the meaning of CONTEXT. */
20651
20652 static void
20653 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20654 int forms, struct loc_descr_context *context)
20655 {
20656 dw_die_ref context_die, decl_die = NULL;
20657 dw_loc_list_ref list;
20658 bool strip_conversions = true;
20659 bool placeholder_seen = false;
20660
20661 while (strip_conversions)
20662 switch (TREE_CODE (value))
20663 {
20664 case ERROR_MARK:
20665 case SAVE_EXPR:
20666 return;
20667
20668 CASE_CONVERT:
20669 case VIEW_CONVERT_EXPR:
20670 value = TREE_OPERAND (value, 0);
20671 break;
20672
20673 default:
20674 strip_conversions = false;
20675 break;
20676 }
20677
20678 /* If possible and permitted, output the attribute as a constant. */
20679 if ((forms & dw_scalar_form_constant) != 0
20680 && TREE_CODE (value) == INTEGER_CST)
20681 {
20682 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20683
20684 /* If HOST_WIDE_INT is big enough then represent the bound as
20685 a constant value. We need to choose a form based on
20686 whether the type is signed or unsigned. We cannot just
20687 call add_AT_unsigned if the value itself is positive
20688 (add_AT_unsigned might add the unsigned value encoded as
20689 DW_FORM_data[1248]). Some DWARF consumers will look up the
20690 bounds type and then sign extend any unsigned values found
20691 for signed types. This is needed only for
20692 DW_AT_{lower,upper}_bound, since for most other attributes,
20693 consumers will treat DW_FORM_data[1248] as unsigned values,
20694 regardless of the underlying type. */
20695 if (prec <= HOST_BITS_PER_WIDE_INT
20696 || tree_fits_uhwi_p (value))
20697 {
20698 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20699 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20700 else
20701 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20702 }
20703 else
20704 /* Otherwise represent the bound as an unsigned value with
20705 the precision of its type. The precision and signedness
20706 of the type will be necessary to re-interpret it
20707 unambiguously. */
20708 add_AT_wide (die, attr, wi::to_wide (value));
20709 return;
20710 }
20711
20712 /* Otherwise, if it's possible and permitted too, output a reference to
20713 another DIE. */
20714 if ((forms & dw_scalar_form_reference) != 0)
20715 {
20716 tree decl = NULL_TREE;
20717
20718 /* Some type attributes reference an outer type. For instance, the upper
20719 bound of an array may reference an embedding record (this happens in
20720 Ada). */
20721 if (TREE_CODE (value) == COMPONENT_REF
20722 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20723 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20724 decl = TREE_OPERAND (value, 1);
20725
20726 else if (VAR_P (value)
20727 || TREE_CODE (value) == PARM_DECL
20728 || TREE_CODE (value) == RESULT_DECL)
20729 decl = value;
20730
20731 if (decl != NULL_TREE)
20732 {
20733 decl_die = lookup_decl_die (decl);
20734
20735 /* ??? Can this happen, or should the variable have been bound
20736 first? Probably it can, since I imagine that we try to create
20737 the types of parameters in the order in which they exist in
20738 the list, and won't have created a forward reference to a
20739 later parameter. */
20740 if (decl_die != NULL)
20741 {
20742 if (get_AT (decl_die, DW_AT_location)
20743 || get_AT (decl_die, DW_AT_data_member_location)
20744 || get_AT (decl_die, DW_AT_const_value))
20745 {
20746 add_AT_die_ref (die, attr, decl_die);
20747 return;
20748 }
20749 }
20750 }
20751 }
20752
20753 /* Last chance: try to create a stack operation procedure to evaluate the
20754 value. Do nothing if even that is not possible or permitted. */
20755 if ((forms & dw_scalar_form_exprloc) == 0)
20756 return;
20757
20758 list = loc_list_from_tree (value, 2, context);
20759 if (context && context->placeholder_arg)
20760 {
20761 placeholder_seen = context->placeholder_seen;
20762 context->placeholder_seen = false;
20763 }
20764 if (list == NULL || single_element_loc_list_p (list))
20765 {
20766 /* If this attribute is not a reference nor constant, it is
20767 a DWARF expression rather than location description. For that
20768 loc_list_from_tree (value, 0, &context) is needed. */
20769 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20770 if (list2 && single_element_loc_list_p (list2))
20771 {
20772 if (placeholder_seen)
20773 {
20774 struct dwarf_procedure_info dpi;
20775 dpi.fndecl = NULL_TREE;
20776 dpi.args_count = 1;
20777 if (!resolve_args_picking (list2->expr, 1, &dpi))
20778 return;
20779 }
20780 add_AT_loc (die, attr, list2->expr);
20781 return;
20782 }
20783 }
20784
20785 /* If that failed to give a single element location list, fall back to
20786 outputting this as a reference... still if permitted. */
20787 if (list == NULL
20788 || (forms & dw_scalar_form_reference) == 0
20789 || placeholder_seen)
20790 return;
20791
20792 if (!decl_die)
20793 {
20794 if (current_function_decl == 0)
20795 context_die = comp_unit_die ();
20796 else
20797 context_die = lookup_decl_die (current_function_decl);
20798
20799 decl_die = new_die (DW_TAG_variable, context_die, value);
20800 add_AT_flag (decl_die, DW_AT_artificial, 1);
20801 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20802 context_die);
20803 }
20804
20805 add_AT_location_description (decl_die, DW_AT_location, list);
20806 add_AT_die_ref (die, attr, decl_die);
20807 }
20808
20809 /* Return the default for DW_AT_lower_bound, or -1 if there is not any
20810 default. */
20811
20812 static int
20813 lower_bound_default (void)
20814 {
20815 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20816 {
20817 case DW_LANG_C:
20818 case DW_LANG_C89:
20819 case DW_LANG_C99:
20820 case DW_LANG_C11:
20821 case DW_LANG_C_plus_plus:
20822 case DW_LANG_C_plus_plus_11:
20823 case DW_LANG_C_plus_plus_14:
20824 case DW_LANG_ObjC:
20825 case DW_LANG_ObjC_plus_plus:
20826 return 0;
20827 case DW_LANG_Fortran77:
20828 case DW_LANG_Fortran90:
20829 case DW_LANG_Fortran95:
20830 case DW_LANG_Fortran03:
20831 case DW_LANG_Fortran08:
20832 return 1;
20833 case DW_LANG_UPC:
20834 case DW_LANG_D:
20835 case DW_LANG_Python:
20836 return dwarf_version >= 4 ? 0 : -1;
20837 case DW_LANG_Ada95:
20838 case DW_LANG_Ada83:
20839 case DW_LANG_Cobol74:
20840 case DW_LANG_Cobol85:
20841 case DW_LANG_Modula2:
20842 case DW_LANG_PLI:
20843 return dwarf_version >= 4 ? 1 : -1;
20844 default:
20845 return -1;
20846 }
20847 }
20848
20849 /* Given a tree node describing an array bound (either lower or upper) output
20850 a representation for that bound. */
20851
20852 static void
20853 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20854 tree bound, struct loc_descr_context *context)
20855 {
20856 int dflt;
20857
20858 while (1)
20859 switch (TREE_CODE (bound))
20860 {
20861 /* Strip all conversions. */
20862 CASE_CONVERT:
20863 case VIEW_CONVERT_EXPR:
20864 bound = TREE_OPERAND (bound, 0);
20865 break;
20866
20867 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20868 are even omitted when they are the default. */
20869 case INTEGER_CST:
20870 /* If the value for this bound is the default one, we can even omit the
20871 attribute. */
20872 if (bound_attr == DW_AT_lower_bound
20873 && tree_fits_shwi_p (bound)
20874 && (dflt = lower_bound_default ()) != -1
20875 && tree_to_shwi (bound) == dflt)
20876 return;
20877
20878 /* FALLTHRU */
20879
20880 default:
20881 /* Because of the complex interactions there can be with other GNAT
20882 encodings, GDB isn't ready yet to handle a proper DWARF description
20883 for self-referential subrange bounds: let GNAT encodings do the
20884 magic in such a case. */
20885 if (is_ada ()
20886 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20887 && contains_placeholder_p (bound))
20888 return;
20889
20890 add_scalar_info (subrange_die, bound_attr, bound,
20891 dw_scalar_form_constant
20892 | dw_scalar_form_exprloc
20893 | dw_scalar_form_reference,
20894 context);
20895 return;
20896 }
20897 }
20898
20899 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20900 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20901 Note that the block of subscript information for an array type also
20902 includes information about the element type of the given array type.
20903
20904 This function reuses previously set type and bound information if
20905 available. */
20906
20907 static void
20908 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20909 {
20910 unsigned dimension_number;
20911 tree lower, upper;
20912 dw_die_ref child = type_die->die_child;
20913
20914 for (dimension_number = 0;
20915 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20916 type = TREE_TYPE (type), dimension_number++)
20917 {
20918 tree domain = TYPE_DOMAIN (type);
20919
20920 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20921 break;
20922
20923 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20924 and (in GNU C only) variable bounds. Handle all three forms
20925 here. */
20926
20927 /* Find and reuse a previously generated DW_TAG_subrange_type if
20928 available.
20929
20930 For multi-dimensional arrays, as we iterate through the
20931 various dimensions in the enclosing for loop above, we also
20932 iterate through the DIE children and pick at each
20933 DW_TAG_subrange_type previously generated (if available).
20934 Each child DW_TAG_subrange_type DIE describes the range of
20935 the current dimension. At this point we should have as many
20936 DW_TAG_subrange_type's as we have dimensions in the
20937 array. */
20938 dw_die_ref subrange_die = NULL;
20939 if (child)
20940 while (1)
20941 {
20942 child = child->die_sib;
20943 if (child->die_tag == DW_TAG_subrange_type)
20944 subrange_die = child;
20945 if (child == type_die->die_child)
20946 {
20947 /* If we wrapped around, stop looking next time. */
20948 child = NULL;
20949 break;
20950 }
20951 if (child->die_tag == DW_TAG_subrange_type)
20952 break;
20953 }
20954 if (!subrange_die)
20955 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20956
20957 if (domain)
20958 {
20959 /* We have an array type with specified bounds. */
20960 lower = TYPE_MIN_VALUE (domain);
20961 upper = TYPE_MAX_VALUE (domain);
20962
20963 /* Define the index type. */
20964 if (TREE_TYPE (domain)
20965 && !get_AT (subrange_die, DW_AT_type))
20966 {
20967 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20968 TREE_TYPE field. We can't emit debug info for this
20969 because it is an unnamed integral type. */
20970 if (TREE_CODE (domain) == INTEGER_TYPE
20971 && TYPE_NAME (domain) == NULL_TREE
20972 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20973 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20974 ;
20975 else
20976 add_type_attribute (subrange_die, TREE_TYPE (domain),
20977 TYPE_UNQUALIFIED, false, type_die);
20978 }
20979
20980 /* ??? If upper is NULL, the array has unspecified length,
20981 but it does have a lower bound. This happens with Fortran
20982 dimension arr(N:*)
20983 Since the debugger is definitely going to need to know N
20984 to produce useful results, go ahead and output the lower
20985 bound solo, and hope the debugger can cope. */
20986
20987 if (!get_AT (subrange_die, DW_AT_lower_bound))
20988 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20989 if (!get_AT (subrange_die, DW_AT_upper_bound)
20990 && !get_AT (subrange_die, DW_AT_count))
20991 {
20992 if (upper)
20993 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20994 else if ((is_c () || is_cxx ()) && COMPLETE_TYPE_P (type))
20995 /* Zero-length array. */
20996 add_bound_info (subrange_die, DW_AT_count,
20997 build_int_cst (TREE_TYPE (lower), 0), NULL);
20998 }
20999 }
21000
21001 /* Otherwise we have an array type with an unspecified length. The
21002 DWARF-2 spec does not say how to handle this; let's just leave out the
21003 bounds. */
21004 }
21005 }
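
/* A rough sketch of the collapsing described above (illustrative only):
   for the C declaration

       int m[2][3];

   with COLLAPSE_P true, a single DW_TAG_array_type is produced with two
   DW_TAG_subrange_type children, one with DW_AT_upper_bound 1 and one
   with DW_AT_upper_bound 2, and the array DIE carries a DW_AT_type
   reference to the DIE for `int'.  With COLLAPSE_P false (as for Ada),
   each dimension would instead keep its own array type DIE.  Exact
   attribute forms may vary by DWARF version.  */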
21006
21007 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
21008
21009 static void
21010 add_byte_size_attribute (dw_die_ref die, tree tree_node)
21011 {
21012 dw_die_ref decl_die;
21013 HOST_WIDE_INT size;
21014 dw_loc_descr_ref size_expr = NULL;
21015
21016 switch (TREE_CODE (tree_node))
21017 {
21018 case ERROR_MARK:
21019 size = 0;
21020 break;
21021 case ENUMERAL_TYPE:
21022 case RECORD_TYPE:
21023 case UNION_TYPE:
21024 case QUAL_UNION_TYPE:
21025 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
21026 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
21027 {
21028 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
21029 return;
21030 }
21031 size_expr = type_byte_size (tree_node, &size);
21032 break;
21033 case FIELD_DECL:
21034 /* For a data member of a struct or union, the DW_AT_byte_size is
21035 generally given as the number of bytes normally allocated for an
21036 object of the *declared* type of the member itself. This is true
21037 even for bit-fields. */
21038 size = int_size_in_bytes (field_type (tree_node));
21039 break;
21040 default:
21041 gcc_unreachable ();
21042 }
21043
21044 /* Support for dynamically-sized objects was introduced by DWARFv3.
21045 At the moment, GDB does not handle variable byte sizes very well,
21046 though. */
21047 if ((dwarf_version >= 3 || !dwarf_strict)
21048 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
21049 && size_expr != NULL)
21050 add_AT_loc (die, DW_AT_byte_size, size_expr);
21051
21052 /* Note that `size' might be -1 when we get to this point. If it is, that
21053 indicates that the byte size of the entity in question is variable and
21054 that we could not generate a DWARF expression that computes it. */
21055 if (size >= 0)
21056 add_AT_unsigned (die, DW_AT_byte_size, size);
21057 }
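
/* Illustrative example of the FIELD_DECL case above: for

       struct s { unsigned bits : 3; };

   the FIELD_DECL `bits' would typically get DW_AT_byte_size 4 on
   targets with 32-bit int, i.e. the size of its declared type
   `unsigned int', even though only 3 bits are actually used; the
   bit-level layout is conveyed separately via DW_AT_bit_size and
   DW_AT_bit_offset (or DW_AT_data_bit_offset in DWARF 4 and later).
   Targets with a different int size would of course yield a different
   number.  */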
21058
21059 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
21060 alignment. */
21061
21062 static void
21063 add_alignment_attribute (dw_die_ref die, tree tree_node)
21064 {
21065 if (dwarf_version < 5 && dwarf_strict)
21066 return;
21067
21068 unsigned align;
21069
21070 if (DECL_P (tree_node))
21071 {
21072 if (!DECL_USER_ALIGN (tree_node))
21073 return;
21074
21075 align = DECL_ALIGN_UNIT (tree_node);
21076 }
21077 else if (TYPE_P (tree_node))
21078 {
21079 if (!TYPE_USER_ALIGN (tree_node))
21080 return;
21081
21082 align = TYPE_ALIGN_UNIT (tree_node);
21083 }
21084 else
21085 gcc_unreachable ();
21086
21087 add_AT_unsigned (die, DW_AT_alignment, align);
21088 }
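
/* Rough example of the user-alignment rule above: given

       _Alignas (16) int x;
       struct s { int i; };

   `x' would typically get DW_AT_alignment 16 because the alignment was
   explicitly requested, while `s' and its member get no DW_AT_alignment
   at all since their alignment is just the ABI default.  Note the
   attribute is suppressed entirely for strict pre-DWARF 5 output, as
   the early return above shows.  */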
21089
21090 /* For a FIELD_DECL node which represents a bit-field, output an attribute
21091 which specifies the distance in bits from the highest order bit of the
21092 "containing object" for the bit-field to the highest order bit of the
21093 bit-field itself.
21094
21095 For any given bit-field, the "containing object" is a hypothetical object
21096 (of some integral or enum type) within which the given bit-field lives. The
21097 type of this hypothetical "containing object" is always the same as the
21098 declared type of the individual bit-field itself. The determination of the
21099 exact location of the "containing object" for a bit-field is rather
21100 complicated. It's handled by the `field_byte_offset' function (above).
21101
21102 Note that it is the size (in bytes) of the hypothetical "containing object"
21103 which will be given in the DW_AT_byte_size attribute for this bit-field.
21104 (See `byte_size_attribute' above). */
21105
21106 static inline void
21107 add_bit_offset_attribute (dw_die_ref die, tree decl)
21108 {
21109 HOST_WIDE_INT object_offset_in_bytes;
21110 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21111 HOST_WIDE_INT bitpos_int;
21112 HOST_WIDE_INT highest_order_object_bit_offset;
21113 HOST_WIDE_INT highest_order_field_bit_offset;
21114 HOST_WIDE_INT bit_offset;
21115
21116 /* The containing object is within the DECL_CONTEXT. */
21117 struct vlr_context ctx = { DECL_CONTEXT (decl), NULL_TREE };
21118
21119 field_byte_offset (decl, &ctx, &object_offset_in_bytes);
21120
21121 /* Must be a field and a bit field. */
21122 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21123
21124 /* We can't yet handle bit-fields whose offsets are variable, so if we
21125 encounter such things, just return without generating any attribute
21126 whatsoever. Likewise for a variable or overly large size. */
21127 if (! tree_fits_shwi_p (bit_position (decl))
21128 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21129 return;
21130
21131 bitpos_int = int_bit_position (decl);
21132
21133 /* Note that the bit offset is always the distance (in bits) from the
21134 highest-order bit of the "containing object" to the highest-order bit of
21135 the bit-field itself. Since the "high-order end" of any object or field
21136 is different on big-endian and little-endian machines, the computation
21137 below must take account of these differences. */
21138 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21139 highest_order_field_bit_offset = bitpos_int;
21140
21141 if (! BYTES_BIG_ENDIAN)
21142 {
21143 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21144 highest_order_object_bit_offset +=
21145 simple_type_size_in_bits (original_type);
21146 }
21147
21148 bit_offset
21149 = (! BYTES_BIG_ENDIAN
21150 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21151 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21152
21153 if (bit_offset < 0)
21154 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21155 else
21156 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21157 }
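
/* Worked example of the computation above (little-endian target,
   32-bit int; figures are illustrative):

       struct s { unsigned a : 3; unsigned b : 5; };

   For `b', int_bit_position is 3, DECL_SIZE is 5, and the containing
   object starts at byte offset 0, so

       highest_order_object_bit_offset = 0 * 8 + 32 = 32
       highest_order_field_bit_offset  = 3 + 5      = 8
       bit_offset                      = 32 - 8     = 24

   i.e. DW_AT_bit_offset 24, the old DWARF 2/3 style description of the
   bit-field on a little-endian target.  On a big-endian target the two
   adjustments are skipped and bit_offset would simply be 3.  */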
21158
21159 /* For a FIELD_DECL node which represents a bit field, output an attribute
21160 which specifies the length in bits of the given field. */
21161
21162 static inline void
21163 add_bit_size_attribute (dw_die_ref die, tree decl)
21164 {
21165 /* Must be a field and a bit field. */
21166 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21167 && DECL_BIT_FIELD_TYPE (decl));
21168
21169 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21170 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21171 }
21172
21173 /* If the compiled language is ANSI C, add a DW_AT_prototyped
21174 attribute if argument types are given for the function's parameters. */
21175
21176 static inline void
21177 add_prototyped_attribute (dw_die_ref die, tree func_type)
21178 {
21179 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21180 {
21181 case DW_LANG_C:
21182 case DW_LANG_C89:
21183 case DW_LANG_C99:
21184 case DW_LANG_C11:
21185 case DW_LANG_ObjC:
21186 if (prototype_p (func_type))
21187 add_AT_flag (die, DW_AT_prototyped, 1);
21188 break;
21189 default:
21190 break;
21191 }
21192 }
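
/* Illustrative contrast for the prototype check above: in C,

       int f (void);   -- prototyped, so DW_AT_prototyped 1 is added
       int g ();       -- unprototyped (pre-C2x), so no attribute

   prototype_p distinguishes the two.  C++ function types are always
   prototyped, but the language switch above restricts the attribute to
   the C dialects and Objective-C, mirroring what consumers
   historically expect.  */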
21193
21194 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21195 by looking in the type declaration, the object declaration equate table or
21196 the block mapping. */
21197
21198 static inline void
21199 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21200 {
21201 dw_die_ref origin_die = NULL;
21202
21203 /* For late LTO debug output we want to refer directly to the abstract
21204 DIE in the early debug rather than to the possibly existing concrete
21205 instance, to avoid creating that instance just for this purpose. */
21206 sym_off_pair *desc;
21207 if (in_lto_p
21208 && external_die_map
21209 && (desc = external_die_map->get (origin)))
21210 {
21211 add_AT_external_die_ref (die, DW_AT_abstract_origin,
21212 desc->sym, desc->off);
21213 return;
21214 }
21215
21216 if (DECL_P (origin))
21217 origin_die = lookup_decl_die (origin);
21218 else if (TYPE_P (origin))
21219 origin_die = lookup_type_die (origin);
21220 else if (TREE_CODE (origin) == BLOCK)
21221 origin_die = lookup_block_die (origin);
21222
21223 /* XXX: Functions that are never lowered don't always have correct block
21224 trees (in the case of Java they simply have no block tree; the same can be
21225 true in some other languages). For these functions, there is nothing we can
21226 really do to output correct debug info for inlined functions in all cases. Rather
21227 than die, we'll just produce deficient debug info now, in that we will
21228 have variables without a proper abstract origin. In the future, when all
21229 functions are lowered, we should re-add a gcc_assert (origin_die)
21230 here. */
21231
21232 if (origin_die)
21233 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21234 }
21235
21236 /* We do not currently support the pure_virtual attribute. */
21237
21238 static inline void
21239 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21240 {
21241 if (DECL_VINDEX (func_decl))
21242 {
21243 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21244
21245 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21246 add_AT_loc (die, DW_AT_vtable_elem_location,
21247 new_loc_descr (DW_OP_constu,
21248 tree_to_shwi (DECL_VINDEX (func_decl)),
21249 0));
21250
21251 /* GNU extension: Record what type this method came from originally. */
21252 if (debug_info_level > DINFO_LEVEL_TERSE
21253 && DECL_CONTEXT (func_decl))
21254 add_AT_die_ref (die, DW_AT_containing_type,
21255 lookup_type_die (DECL_CONTEXT (func_decl)));
21256 }
21257 }
21258 \f
21259 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21260 given decl. This was a vendor extension (DW_AT_MIPS_linkage_name)
21261 until DWARF 4 standardized it as DW_AT_linkage_name. */
21262
21263 static void
21264 add_linkage_attr (dw_die_ref die, tree decl)
21265 {
21266 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21267
21268 /* Mimic what assemble_name_raw does with a leading '*'. */
21269 if (name[0] == '*')
21270 name = &name[1];
21271
21272 if (dwarf_version >= 4)
21273 add_AT_string (die, DW_AT_linkage_name, name);
21274 else
21275 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21276 }
21277
21278 /* Add source coordinate attributes for the given decl. */
21279
21280 static void
21281 add_src_coords_attributes (dw_die_ref die, tree decl)
21282 {
21283 expanded_location s;
21284
21285 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21286 return;
21287 s = expand_location (DECL_SOURCE_LOCATION (decl));
21288 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21289 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21290 if (debug_column_info && s.column)
21291 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21292 }
21293
21294 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21295
21296 static void
21297 add_linkage_name_raw (dw_die_ref die, tree decl)
21298 {
21299 /* Defer until we have an assembler name set. */
21300 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21301 {
21302 limbo_die_node *asm_name;
21303
21304 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21305 asm_name->die = die;
21306 asm_name->created_for = decl;
21307 asm_name->next = deferred_asm_name;
21308 deferred_asm_name = asm_name;
21309 }
21310 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21311 add_linkage_attr (die, decl);
21312 }
21313
21314 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21315
21316 static void
21317 add_linkage_name (dw_die_ref die, tree decl)
21318 {
21319 if (debug_info_level > DINFO_LEVEL_NONE
21320 && VAR_OR_FUNCTION_DECL_P (decl)
21321 && TREE_PUBLIC (decl)
21322 && !(VAR_P (decl) && DECL_REGISTER (decl))
21323 && die->die_tag != DW_TAG_member)
21324 add_linkage_name_raw (die, decl);
21325 }
21326
21327 /* Add a DW_AT_name attribute and source coordinate attribute for the
21328 given decl, but only if it actually has a name. */
21329
21330 static void
21331 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21332 bool no_linkage_name)
21333 {
21334 tree decl_name;
21335
21336 decl_name = DECL_NAME (decl);
21337 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21338 {
21339 const char *name = dwarf2_name (decl, 0);
21340 if (name)
21341 add_name_attribute (die, name);
21342 else
21343 add_desc_attribute (die, decl);
21344
21345 if (! DECL_ARTIFICIAL (decl))
21346 add_src_coords_attributes (die, decl);
21347
21348 if (!no_linkage_name)
21349 add_linkage_name (die, decl);
21350 }
21351 else
21352 add_desc_attribute (die, decl);
21353
21354 #ifdef VMS_DEBUGGING_INFO
21355 /* Get the function's name, as described by its RTL. This may be different
21356 from the DECL_NAME name used in the source file. */
21357 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21358 {
21359 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21360 XEXP (DECL_RTL (decl), 0), false);
21361 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21362 }
21363 #endif /* VMS_DEBUGGING_INFO */
21364 }
21365
21366 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21367
21368 static void
21369 add_discr_value (dw_die_ref die, dw_discr_value *value)
21370 {
21371 dw_attr_node attr;
21372
21373 attr.dw_attr = DW_AT_discr_value;
21374 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21375 attr.dw_attr_val.val_entry = NULL;
21376 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21377 if (value->pos)
21378 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21379 else
21380 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21381 add_dwarf_attr (die, &attr);
21382 }
21383
21384 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21385
21386 static void
21387 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21388 {
21389 dw_attr_node attr;
21390
21391 attr.dw_attr = DW_AT_discr_list;
21392 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21393 attr.dw_attr_val.val_entry = NULL;
21394 attr.dw_attr_val.v.val_discr_list = discr_list;
21395 add_dwarf_attr (die, &attr);
21396 }
21397
21398 static inline dw_discr_list_ref
21399 AT_discr_list (dw_attr_node *attr)
21400 {
21401 return attr->dw_attr_val.v.val_discr_list;
21402 }
21403
21404 #ifdef VMS_DEBUGGING_INFO
21405 /* Output the debug main pointer die for VMS */
21406
21407 void
21408 dwarf2out_vms_debug_main_pointer (void)
21409 {
21410 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21411 dw_die_ref die;
21412
21413 /* Allocate the VMS debug main subprogram die. */
21414 die = new_die_raw (DW_TAG_subprogram);
21415 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21416 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21417 current_function_funcdef_no);
21418 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21419
21420 /* Make it the first child of comp_unit_die (). */
21421 die->die_parent = comp_unit_die ();
21422 if (comp_unit_die ()->die_child)
21423 {
21424 die->die_sib = comp_unit_die ()->die_child->die_sib;
21425 comp_unit_die ()->die_child->die_sib = die;
21426 }
21427 else
21428 {
21429 die->die_sib = die;
21430 comp_unit_die ()->die_child = die;
21431 }
21432 }
21433 #endif /* VMS_DEBUGGING_INFO */
21434
21435 /* walk_tree helper function for uses_local_type, below. */
21436
21437 static tree
21438 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21439 {
21440 if (!TYPE_P (*tp))
21441 *walk_subtrees = 0;
21442 else
21443 {
21444 tree name = TYPE_NAME (*tp);
21445 if (name && DECL_P (name) && decl_function_context (name))
21446 return *tp;
21447 }
21448 return NULL_TREE;
21449 }
21450
21451 /* If TYPE involves a function-local type (including a local typedef to a
21452 non-local type), returns that type; otherwise returns NULL_TREE. */
21453
21454 static tree
21455 uses_local_type (tree type)
21456 {
21457 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21458 return used;
21459 }
21460
21461 /* Return the DIE for the scope that immediately contains this type.
21462 Non-named types that do not involve a function-local type get global
21463 scope. Named types nested in namespaces or other types get their
21464 containing scope. All other types (i.e. function-local named types) get
21465 the current active scope. */
21466
21467 static dw_die_ref
21468 scope_die_for (tree t, dw_die_ref context_die)
21469 {
21470 dw_die_ref scope_die = NULL;
21471 tree containing_scope;
21472
21473 /* Non-types always go in the current scope. */
21474 gcc_assert (TYPE_P (t));
21475
21476 /* Use the scope of the typedef, rather than the scope of the type
21477 it refers to. */
21478 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21479 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21480 else
21481 containing_scope = TYPE_CONTEXT (t);
21482
21483 /* Use the containing namespace if there is one. */
21484 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21485 {
21486 if (context_die == lookup_decl_die (containing_scope))
21487 /* OK */;
21488 else if (debug_info_level > DINFO_LEVEL_TERSE)
21489 context_die = get_context_die (containing_scope);
21490 else
21491 containing_scope = NULL_TREE;
21492 }
21493
21494 /* Ignore function type "scopes" from the C frontend. They mean that
21495 a tagged type is local to a parmlist of a function declarator, but
21496 that isn't useful to DWARF. */
21497 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21498 containing_scope = NULL_TREE;
21499
21500 if (SCOPE_FILE_SCOPE_P (containing_scope))
21501 {
21502 /* If T uses a local type keep it local as well, to avoid references
21503 to function-local DIEs from outside the function. */
21504 if (current_function_decl && uses_local_type (t))
21505 scope_die = context_die;
21506 else
21507 scope_die = comp_unit_die ();
21508 }
21509 else if (TYPE_P (containing_scope))
21510 {
21511 /* For types, we can just look up the appropriate DIE. */
21512 if (debug_info_level > DINFO_LEVEL_TERSE)
21513 scope_die = get_context_die (containing_scope);
21514 else
21515 {
21516 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21517 if (scope_die == NULL)
21518 scope_die = comp_unit_die ();
21519 }
21520 }
21521 else
21522 scope_die = context_die;
21523
21524 return scope_die;
21525 }
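
/* Rough illustration of the scoping rules above:

       namespace N { struct A { struct B {}; }; }   // C++

   The DIE for `N::A' is placed under the DW_TAG_namespace DIE for `N',
   and the DIE for `N::A::B' under the DW_TAG_structure_type DIE for
   `A'.  A type whose context is file scope normally lands directly
   under the compilation unit DIE, unless it involves a function-local
   type, in which case it is kept in the current (local) scope so that
   no DIE outside the function refers into it.  */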
21526
21527 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21528
21529 static inline int
21530 local_scope_p (dw_die_ref context_die)
21531 {
21532 for (; context_die; context_die = context_die->die_parent)
21533 if (context_die->die_tag == DW_TAG_inlined_subroutine
21534 || context_die->die_tag == DW_TAG_subprogram)
21535 return 1;
21536
21537 return 0;
21538 }
21539
21540 /* Returns nonzero if CONTEXT_DIE is a class. */
21541
21542 static inline int
21543 class_scope_p (dw_die_ref context_die)
21544 {
21545 return (context_die
21546 && (context_die->die_tag == DW_TAG_structure_type
21547 || context_die->die_tag == DW_TAG_class_type
21548 || context_die->die_tag == DW_TAG_interface_type
21549 || context_die->die_tag == DW_TAG_union_type));
21550 }
21551
21552 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21553 whether or not to treat a DIE in this context as a declaration. */
21554
21555 static inline int
21556 class_or_namespace_scope_p (dw_die_ref context_die)
21557 {
21558 return (class_scope_p (context_die)
21559 || (context_die && context_die->die_tag == DW_TAG_namespace));
21560 }
21561
21562 /* Many forms of DIEs require a "type description" attribute. This
21563 routine locates the proper "type descriptor" die for the type given
21564 by 'type' plus any additional qualifiers given by 'cv_quals', and
21565 adds a DW_AT_type attribute below the given die. */
21566
21567 static void
21568 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21569 bool reverse, dw_die_ref context_die)
21570 {
21571 enum tree_code code = TREE_CODE (type);
21572 dw_die_ref type_die = NULL;
21573
21574 if (debug_info_level <= DINFO_LEVEL_TERSE)
21575 return;
21576
21577 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21578 or fixed-point type, use the inner type. This is because we have no
21579 support for unnamed types in base_type_die. This can happen if this is
21580 an Ada subrange type. The correct solution is to emit a subrange type DIE. */
21581 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21582 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21583 type = TREE_TYPE (type), code = TREE_CODE (type);
21584
21585 if (code == ERROR_MARK
21586 /* Handle a special case. For functions whose return type is void, we
21587 generate *no* type attribute. (Note that no object may have type
21588 `void', so this only applies to function return types). */
21589 || code == VOID_TYPE)
21590 return;
21591
21592 type_die = modified_type_die (type,
21593 cv_quals | TYPE_QUALS (type),
21594 reverse,
21595 context_die);
21596
21597 if (type_die != NULL)
21598 add_AT_die_ref (object_die, DW_AT_type, type_die);
21599 }
21600
21601 /* Given an object die, add the calling convention attribute for the
21602 function call type. */
21603 static void
21604 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21605 {
21606 enum dwarf_calling_convention value = DW_CC_normal;
21607
21608 value = ((enum dwarf_calling_convention)
21609 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21610
21611 if (is_fortran ()
21612 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21613 {
21614 /* DWARF 2 doesn't provide a way to identify a program's source-level
21615 entry point. DW_AT_calling_convention attributes are only meant
21616 to describe functions' calling conventions. However, lacking a
21617 better way to signal the Fortran main program, we used this for
21618 a long time, following existing custom. Now, DWARF 4 has
21619 DW_AT_main_subprogram, which we add below, but some tools still
21620 rely on the old way, which we thus keep. */
21621 value = DW_CC_program;
21622
21623 if (dwarf_version >= 4 || !dwarf_strict)
21624 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21625 }
21626
21627 /* Only add the attribute if the backend requests it, and the value
21628 is not DW_CC_normal. */
21629 if (value && (value != DW_CC_normal))
21630 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21631 }
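
/* Sketch of the Fortran main-program special case above: for

       program hello
       end program hello

   gfortran names the generated entry point MAIN__, so the subprogram
   DIE would typically carry DW_AT_calling_convention DW_CC_program
   and, for DWARF 4+ (or non-strict output), DW_AT_main_subprogram 1,
   letting debuggers find the source-level entry point.  Ordinary
   functions with the default calling convention get no
   DW_AT_calling_convention at all.  */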
21632
21633 /* Given a tree pointer to a struct, class, union, or enum type node, return
21634 a pointer to the (string) tag name for the given type, or zero if the type
21635 was declared without a tag. */
21636
21637 static const char *
21638 type_tag (const_tree type)
21639 {
21640 const char *name = 0;
21641
21642 if (TYPE_NAME (type) != 0)
21643 {
21644 tree t = 0;
21645
21646 /* Find the IDENTIFIER_NODE for the type name. */
21647 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21648 && !TYPE_NAMELESS (type))
21649 t = TYPE_NAME (type);
21650
21651 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21652 a TYPE_DECL node, regardless of whether or not a `typedef' was
21653 involved. */
21654 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21655 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21656 {
21657 /* We want to be extra verbose. Don't call dwarf_name if
21658 DECL_NAME isn't set. The default hook for decl_printable_name
21659 doesn't like that, and in this context it's correct to return
21660 0, instead of "<anonymous>" or the like. */
21661 if (DECL_NAME (TYPE_NAME (type))
21662 && !DECL_NAMELESS (TYPE_NAME (type)))
21663 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21664 }
21665
21666 /* Now get the name as a string, or invent one. */
21667 if (!name && t != 0)
21668 name = IDENTIFIER_POINTER (t);
21669 }
21670
21671 return (name == 0 || *name == '\0') ? 0 : name;
21672 }
21673
21674 /* Return the type associated with a data member, making a special check
21675 for bit-field types. */
21676
21677 static inline tree
21678 member_declared_type (const_tree member)
21679 {
21680 return (DECL_BIT_FIELD_TYPE (member)
21681 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21682 }
21683
21684 /* Get the decl's label, as described by its RTL. This may be different
21685 from the DECL_NAME name used in the source file. */
21686
21687 #if 0
21688 static const char *
21689 decl_start_label (tree decl)
21690 {
21691 rtx x;
21692 const char *fnname;
21693
21694 x = DECL_RTL (decl);
21695 gcc_assert (MEM_P (x));
21696
21697 x = XEXP (x, 0);
21698 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21699
21700 fnname = XSTR (x, 0);
21701 return fnname;
21702 }
21703 #endif
21704 \f
21705 /* For variable-length arrays that have been previously generated, but
21706 may be incomplete due to missing subscript info, fill the subscript
21707 info. Return TRUE if this is one of those cases. */
21708 static bool
21709 fill_variable_array_bounds (tree type)
21710 {
21711 if (TREE_ASM_WRITTEN (type)
21712 && TREE_CODE (type) == ARRAY_TYPE
21713 && variably_modified_type_p (type, NULL))
21714 {
21715 dw_die_ref array_die = lookup_type_die (type);
21716 if (!array_die)
21717 return false;
21718 add_subscript_info (array_die, type, !is_ada ());
21719 return true;
21720 }
21721 return false;
21722 }
21723
21724 /* These routines generate the internal representation of the DIE's for
21725 the compilation unit. Debugging information is collected by walking
21726 the declaration trees passed in from dwarf2out_decl(). */
21727
21728 static void
21729 gen_array_type_die (tree type, dw_die_ref context_die)
21730 {
21731 dw_die_ref array_die;
21732
21733 /* GNU compilers represent multidimensional array types as sequences of one
21734 dimensional array types whose element types are themselves array types.
21735 We sometimes squish that down to a single array_type DIE with multiple
21736 subscripts in the Dwarf debugging info. The draft Dwarf specification
21737 says that we are allowed to do this kind of compression in C, because
21738 there is no difference between an array of arrays and a multidimensional
21739 array. We don't do this for Ada, to remain as close as possible to the
21740 actual representation, which is especially important given the language's
21741 flexibility wrt arrays of variable size. */
21742
21743 bool collapse_nested_arrays = !is_ada ();
21744
21745 if (fill_variable_array_bounds (type))
21746 return;
21747
21748 dw_die_ref scope_die = scope_die_for (type, context_die);
21749 tree element_type;
21750
21751 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21752 DW_TAG_string_type doesn't have DW_AT_type attribute). */
21753 if (TREE_CODE (type) == ARRAY_TYPE
21754 && TYPE_STRING_FLAG (type)
21755 && is_fortran ()
21756 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21757 {
21758 HOST_WIDE_INT size;
21759
21760 array_die = new_die (DW_TAG_string_type, scope_die, type);
21761 add_name_attribute (array_die, type_tag (type));
21762 equate_type_number_to_die (type, array_die);
21763 size = int_size_in_bytes (type);
21764 if (size >= 0)
21765 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21766 /* ??? We can't annotate types late, but for LTO we may not
21767 generate a location early either (gfortran.dg/save_6.f90). */
21768 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21769 && TYPE_DOMAIN (type) != NULL_TREE
21770 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21771 {
21772 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21773 tree rszdecl = szdecl;
21774
21775 size = int_size_in_bytes (TREE_TYPE (szdecl));
21776 if (!DECL_P (szdecl))
21777 {
21778 if (TREE_CODE (szdecl) == INDIRECT_REF
21779 && DECL_P (TREE_OPERAND (szdecl, 0)))
21780 {
21781 rszdecl = TREE_OPERAND (szdecl, 0);
21782 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21783 != DWARF2_ADDR_SIZE)
21784 size = 0;
21785 }
21786 else
21787 size = 0;
21788 }
21789 if (size > 0)
21790 {
21791 dw_loc_list_ref loc
21792 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21793 NULL);
21794 if (loc)
21795 {
21796 add_AT_location_description (array_die, DW_AT_string_length,
21797 loc);
21798 if (size != DWARF2_ADDR_SIZE)
21799 add_AT_unsigned (array_die, dwarf_version >= 5
21800 ? DW_AT_string_length_byte_size
21801 : DW_AT_byte_size, size);
21802 }
21803 }
21804 }
21805 return;
21806 }
21807
21808 array_die = new_die (DW_TAG_array_type, scope_die, type);
21809 add_name_attribute (array_die, type_tag (type));
21810 equate_type_number_to_die (type, array_die);
21811
21812 if (TREE_CODE (type) == VECTOR_TYPE)
21813 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21814
21815 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21816 if (is_fortran ()
21817 && TREE_CODE (type) == ARRAY_TYPE
21818 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21819 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21820 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21821
21822 #if 0
21823 /* We default the array ordering. Debuggers will probably do the right
21824 things even if DW_AT_ordering is not present. It's not even an issue
21825 until we start to get into multidimensional arrays anyway. If a debugger
21826 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21827 then we'll have to put the DW_AT_ordering attribute back in. (But if
21828 and when we find out that we need to put these in, we will only do so
21829 for multidimensional arrays.) */
21830 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21831 #endif
21832
21833 if (TREE_CODE (type) == VECTOR_TYPE)
21834 {
21835 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21836 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21837 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21838 add_bound_info (subrange_die, DW_AT_upper_bound,
21839 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21840 }
21841 else
21842 add_subscript_info (array_die, type, collapse_nested_arrays);
21843
21844 /* Add representation of the type of the elements of this array type and
21845 emit the corresponding DIE if we haven't done it already. */
21846 element_type = TREE_TYPE (type);
21847 if (collapse_nested_arrays)
21848 while (TREE_CODE (element_type) == ARRAY_TYPE)
21849 {
21850 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21851 break;
21852 element_type = TREE_TYPE (element_type);
21853 }
21854
21855 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21856 TREE_CODE (type) == ARRAY_TYPE
21857 && TYPE_REVERSE_STORAGE_ORDER (type),
21858 context_die);
21859
21860 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21861 if (TYPE_ARTIFICIAL (type))
21862 add_AT_flag (array_die, DW_AT_artificial, 1);
21863
21864 if (get_AT (array_die, DW_AT_name))
21865 add_pubtype (type, array_die);
21866
21867 add_alignment_attribute (array_die, type);
21868 }
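
/* Illustrative only: for a Fortran declaration such as

       character(len=10) :: s

   the string-type path above emits a DW_TAG_string_type with
   DW_AT_byte_size 10 rather than an array type, since
   DW_TAG_string_type cannot carry a DW_AT_type for the element.  When
   the length is dynamic, a DW_AT_string_length location (plus, for
   DWARF 5, DW_AT_string_length_byte_size) is used instead of a
   constant byte size.  Details naturally depend on the front end and
   DWARF version in use.  */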
21869
21870 /* This routine generates a DIE for an array with a hidden descriptor;
21871 the details are filled into *info by a langhook. */
21872
21873 static void
21874 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21875 dw_die_ref context_die)
21876 {
21877 const dw_die_ref scope_die = scope_die_for (type, context_die);
21878 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21879 struct loc_descr_context context = { type, info->base_decl, NULL,
21880 false, false };
21881 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21882 int dim;
21883
21884 add_name_attribute (array_die, type_tag (type));
21885 equate_type_number_to_die (type, array_die);
21886
21887 if (info->ndimensions > 1)
21888 switch (info->ordering)
21889 {
21890 case array_descr_ordering_row_major:
21891 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21892 break;
21893 case array_descr_ordering_column_major:
21894 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21895 break;
21896 default:
21897 break;
21898 }
21899
21900 if (dwarf_version >= 3 || !dwarf_strict)
21901 {
21902 if (info->data_location)
21903 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21904 dw_scalar_form_exprloc, &context);
21905 if (info->associated)
21906 add_scalar_info (array_die, DW_AT_associated, info->associated,
21907 dw_scalar_form_constant
21908 | dw_scalar_form_exprloc
21909 | dw_scalar_form_reference, &context);
21910 if (info->allocated)
21911 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21912 dw_scalar_form_constant
21913 | dw_scalar_form_exprloc
21914 | dw_scalar_form_reference, &context);
21915 if (info->stride)
21916 {
21917 const enum dwarf_attribute attr
21918 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21919 const int forms
21920 = (info->stride_in_bits)
21921 ? dw_scalar_form_constant
21922 : (dw_scalar_form_constant
21923 | dw_scalar_form_exprloc
21924 | dw_scalar_form_reference);
21925
21926 add_scalar_info (array_die, attr, info->stride, forms, &context);
21927 }
21928 }
21929 if (dwarf_version >= 5)
21930 {
21931 if (info->rank)
21932 {
21933 add_scalar_info (array_die, DW_AT_rank, info->rank,
21934 dw_scalar_form_constant
21935 | dw_scalar_form_exprloc, &context);
21936 subrange_tag = DW_TAG_generic_subrange;
21937 context.placeholder_arg = true;
21938 }
21939 }
21940
21941 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21942
21943 for (dim = 0; dim < info->ndimensions; dim++)
21944 {
21945 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21946
21947 if (info->dimen[dim].bounds_type)
21948 add_type_attribute (subrange_die,
21949 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21950 false, context_die);
21951 if (info->dimen[dim].lower_bound)
21952 add_bound_info (subrange_die, DW_AT_lower_bound,
21953 info->dimen[dim].lower_bound, &context);
21954 if (info->dimen[dim].upper_bound)
21955 add_bound_info (subrange_die, DW_AT_upper_bound,
21956 info->dimen[dim].upper_bound, &context);
21957 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21958 add_scalar_info (subrange_die, DW_AT_byte_stride,
21959 info->dimen[dim].stride,
21960 dw_scalar_form_constant
21961 | dw_scalar_form_exprloc
21962 | dw_scalar_form_reference,
21963 &context);
21964 }
21965
21966 gen_type_die (info->element_type, context_die);
21967 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21968 TREE_CODE (type) == ARRAY_TYPE
21969 && TYPE_REVERSE_STORAGE_ORDER (type),
21970 context_die);
21971
21972 if (get_AT (array_die, DW_AT_name))
21973 add_pubtype (type, array_die);
21974
21975 add_alignment_attribute (array_die, type);
21976 }
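
/* A rough picture of what this produces (illustrative; details vary by
   front end and DWARF version): for a Fortran allocatable array

       real, allocatable :: a(:, :)

   the langhook fills *info with the descriptor fields, and the
   resulting DW_TAG_array_type can carry DW_AT_data_location (where the
   data lives), DW_AT_allocated (whether it is allocated), per-dimension
   DW_TAG_subrange_type DIEs whose bounds are DWARF expressions reading
   the descriptor, and DW_AT_ordering DW_ORD_col_major for the
   multi-dimensional case.  Assumed-rank arguments additionally use
   DW_AT_rank and DW_TAG_generic_subrange in DWARF 5, as handled
   above.  */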
21977
21978 #if 0
21979 static void
21980 gen_entry_point_die (tree decl, dw_die_ref context_die)
21981 {
21982 tree origin = decl_ultimate_origin (decl);
21983 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21984
21985 if (origin != NULL)
21986 add_abstract_origin_attribute (decl_die, origin);
21987 else
21988 {
21989 add_name_and_src_coords_attributes (decl_die, decl);
21990 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21991 TYPE_UNQUALIFIED, false, context_die);
21992 }
21993
21994 if (DECL_ABSTRACT_P (decl))
21995 equate_decl_number_to_die (decl, decl_die);
21996 else
21997 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21998 }
21999 #endif
22000
22001 /* Walk through the list of incomplete types again, trying once more to
22002 emit full debugging info for them. */
22003
22004 static void
22005 retry_incomplete_types (void)
22006 {
22007 set_early_dwarf s;
22008 int i;
22009
22010 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
22011 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
22012 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
22013 vec_safe_truncate (incomplete_types, 0);
22014 }
22015
22016 /* Determine what tag to use for a record type. */
22017
22018 static enum dwarf_tag
22019 record_type_tag (tree type)
22020 {
22021 if (! lang_hooks.types.classify_record)
22022 return DW_TAG_structure_type;
22023
22024 switch (lang_hooks.types.classify_record (type))
22025 {
22026 case RECORD_IS_STRUCT:
22027 return DW_TAG_structure_type;
22028
22029 case RECORD_IS_CLASS:
22030 return DW_TAG_class_type;
22031
22032 case RECORD_IS_INTERFACE:
22033 if (dwarf_version >= 3 || !dwarf_strict)
22034 return DW_TAG_interface_type;
22035 return DW_TAG_structure_type;
22036
22037 default:
22038 gcc_unreachable ();
22039 }
22040 }
22041
22042 /* Generate a DIE to represent an enumeration type. Note that these DIEs
22043 include all of the information about the enumeration values also. Each
22044 enumerated type name/value is listed as a child of the enumerated type
22045 DIE. */
22046
22047 static dw_die_ref
22048 gen_enumeration_type_die (tree type, dw_die_ref context_die)
22049 {
22050 dw_die_ref type_die = lookup_type_die (type);
22051 dw_die_ref orig_type_die = type_die;
22052
22053 if (type_die == NULL)
22054 {
22055 type_die = new_die (DW_TAG_enumeration_type,
22056 scope_die_for (type, context_die), type);
22057 equate_type_number_to_die (type, type_die);
22058 add_name_attribute (type_die, type_tag (type));
22059 if ((dwarf_version >= 4 || !dwarf_strict)
22060 && ENUM_IS_SCOPED (type))
22061 add_AT_flag (type_die, DW_AT_enum_class, 1);
22062 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
22063 add_AT_flag (type_die, DW_AT_declaration, 1);
22064 if (!dwarf_strict)
22065 add_AT_unsigned (type_die, DW_AT_encoding,
22066 TYPE_UNSIGNED (type)
22067 ? DW_ATE_unsigned
22068 : DW_ATE_signed);
22069 }
22070 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
22071 return type_die;
22072 else
22073 remove_AT (type_die, DW_AT_declaration);
22074
22075 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
22076 given enum type is incomplete, do not generate the DW_AT_byte_size
22077 attribute or the DW_AT_element_list attribute. */
22078 if (TYPE_SIZE (type))
22079 {
22080 tree link;
22081
22082 if (!ENUM_IS_OPAQUE (type))
22083 TREE_ASM_WRITTEN (type) = 1;
22084 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
22085 add_byte_size_attribute (type_die, type);
22086 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
22087 add_alignment_attribute (type_die, type);
22088 if ((dwarf_version >= 3 || !dwarf_strict)
22089 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
22090 {
22091 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
22092 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
22093 context_die);
22094 }
22095 if (TYPE_STUB_DECL (type) != NULL_TREE)
22096 {
22097 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
22098 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22099 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
22100 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22101 }
22102
22103 /* If the first reference to this type was as the return type of an
22104 inline function, then it may not have a parent. Fix this now. */
22105 if (type_die->die_parent == NULL)
22106 add_child_die (scope_die_for (type, context_die), type_die);
22107
22108 for (link = TYPE_VALUES (type);
22109 link != NULL; link = TREE_CHAIN (link))
22110 {
22111 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
22112 tree value = TREE_VALUE (link);
22113
22114 gcc_assert (!ENUM_IS_OPAQUE (type));
22115 add_name_attribute (enum_die,
22116 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
22117
22118 if (TREE_CODE (value) == CONST_DECL)
22119 value = DECL_INITIAL (value);
22120
22121 if (simple_type_size_in_bits (TREE_TYPE (value))
22122 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22123 {
22124 /* For constant forms created by add_AT_unsigned, DWARF
22125 consumers (GDB, elfutils, etc.) always zero-extend
22126 the value. Only when the actual value is negative
22127 do we need to use add_AT_int to generate a constant
22128 form that can represent negative values. */
22129 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22130 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22131 add_AT_unsigned (enum_die, DW_AT_const_value,
22132 (unsigned HOST_WIDE_INT) val);
22133 else
22134 add_AT_int (enum_die, DW_AT_const_value, val);
22135 }
22136 else
22137 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22138 that here. TODO: This should be re-worked to use correct
22139 signed/unsigned double tags for all cases. */
22140 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22141 }
22142
22143 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22144 if (TYPE_ARTIFICIAL (type)
22145 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22146 add_AT_flag (type_die, DW_AT_artificial, 1);
22147 }
22148 else
22149 add_AT_flag (type_die, DW_AT_declaration, 1);
22150
22151 add_pubtype (type, type_die);
22152
22153 return type_die;
22154 }
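
/* Illustrative sketch of the output for a simple C enum (attribute
   forms may differ by target and DWARF version):

       enum color { RED = 1, GREEN = 2, BLUE = -1 };

   yields roughly

       DW_TAG_enumeration_type  "color", DW_AT_byte_size 4
         DW_TAG_enumerator  "RED"    DW_AT_const_value 1
         DW_TAG_enumerator  "GREEN"  DW_AT_const_value 2
         DW_TAG_enumerator  "BLUE"   DW_AT_const_value -1

   where the negative value uses the signed constant form as described
   in the loop above, and scoped C++ enums would additionally carry
   DW_AT_enum_class 1.  */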
22155
22156 /* Generate a DIE to represent either a real live formal parameter decl or to
22157 represent just the type of some formal parameter position in some function
22158 type.
22159
22160 Note that this routine is a bit unusual because its argument may be a
22161 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22162 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22163 node. If it's the former then this function is being called to output a
22164 DIE to represent a formal parameter object (or some inlining thereof). If
22165 it's the latter, then this function is only being called to output a
22166 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22167 argument type of some subprogram type.
22168 If EMIT_NAME_P is true, name and source coordinate attributes
22169 are emitted. */
22170
22171 static dw_die_ref
22172 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22173 dw_die_ref context_die)
22174 {
22175 tree node_or_origin = node ? node : origin;
22176 tree ultimate_origin;
22177 dw_die_ref parm_die = NULL;
22178
22179 if (DECL_P (node_or_origin))
22180 {
22181 parm_die = lookup_decl_die (node);
22182
22183 /* If the contexts differ, we may not be talking about the same
22184 thing.
22185 ??? When in LTO the DIE parent is the "abstract" copy and the
22186 context_die is the specification "copy". */
22187 if (parm_die
22188 && parm_die->die_parent != context_die
22189 && (parm_die->die_parent->die_tag != DW_TAG_GNU_formal_parameter_pack
22190 || parm_die->die_parent->die_parent != context_die)
22191 && !in_lto_p)
22192 {
22193 gcc_assert (!DECL_ABSTRACT_P (node));
22194 /* This can happen when creating a concrete instance, in
22195 which case we need to create a new DIE that will get
22196 annotated with DW_AT_abstract_origin. */
22197 parm_die = NULL;
22198 }
22199
22200 if (parm_die && parm_die->die_parent == NULL)
22201 {
22202 /* Check that parm_die already has the right attributes that
22203 we would have added below. If any attributes are
22204 missing, fall through to add them. */
22205 if (! DECL_ABSTRACT_P (node_or_origin)
22206 && !get_AT (parm_die, DW_AT_location)
22207 && !get_AT (parm_die, DW_AT_const_value))
22208 /* We are missing location info, and are about to add it. */
22209 ;
22210 else
22211 {
22212 add_child_die (context_die, parm_die);
22213 return parm_die;
22214 }
22215 }
22216 }
22217
22218 /* If we have a previously generated DIE, use it, unless this is a
22219 concrete instance (origin != NULL), in which case we need a new
22220 DIE with a corresponding DW_AT_abstract_origin. */
22221 bool reusing_die;
22222 if (parm_die && origin == NULL)
22223 reusing_die = true;
22224 else
22225 {
22226 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22227 reusing_die = false;
22228 }
22229
22230 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22231 {
22232 case tcc_declaration:
22233 ultimate_origin = decl_ultimate_origin (node_or_origin);
22234 if (node || ultimate_origin)
22235 origin = ultimate_origin;
22236
22237 if (reusing_die)
22238 goto add_location;
22239
22240 if (origin != NULL)
22241 add_abstract_origin_attribute (parm_die, origin);
22242 else if (emit_name_p)
22243 add_name_and_src_coords_attributes (parm_die, node);
22244 if (origin == NULL
22245 || (! DECL_ABSTRACT_P (node_or_origin)
22246 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22247 decl_function_context
22248 (node_or_origin))))
22249 {
22250 tree type = TREE_TYPE (node_or_origin);
22251 if (decl_by_reference_p (node_or_origin))
22252 add_type_attribute (parm_die, TREE_TYPE (type),
22253 TYPE_UNQUALIFIED,
22254 false, context_die);
22255 else
22256 add_type_attribute (parm_die, type,
22257 decl_quals (node_or_origin),
22258 false, context_die);
22259 }
22260 if (origin == NULL && DECL_ARTIFICIAL (node))
22261 add_AT_flag (parm_die, DW_AT_artificial, 1);
22262 add_location:
22263 if (node && node != origin)
22264 equate_decl_number_to_die (node, parm_die);
22265 if (! DECL_ABSTRACT_P (node_or_origin))
22266 add_location_or_const_value_attribute (parm_die, node_or_origin,
22267 node == NULL);
22268
22269 break;
22270
22271 case tcc_type:
22272 /* We were called with some kind of a ..._TYPE node. */
22273 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22274 context_die);
22275 break;
22276
22277 default:
22278 gcc_unreachable ();
22279 }
22280
22281 return parm_die;
22282 }
22283
22284 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22285 children DW_TAG_formal_parameter DIEs representing the arguments of the
22286 parameter pack.
22287
22288 PARM_PACK must be a function parameter pack.
22289 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22290 must point to the subsequent arguments of the function PACK_ARG belongs to.
22291 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22292 If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
22293 following the last one for which a DIE was generated. */
22294
22295 static dw_die_ref
22296 gen_formal_parameter_pack_die (tree parm_pack,
22297 tree pack_arg,
22298 dw_die_ref subr_die,
22299 tree *next_arg)
22300 {
22301 tree arg;
22302 dw_die_ref parm_pack_die;
22303
22304 gcc_assert (parm_pack
22305 && lang_hooks.function_parameter_pack_p (parm_pack)
22306 && subr_die);
22307
22308 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22309 add_src_coords_attributes (parm_pack_die, parm_pack);
22310
22311 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22312 {
22313 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22314 parm_pack))
22315 break;
22316 gen_formal_parameter_die (arg, NULL,
22317 false /* Don't emit name attribute. */,
22318 parm_pack_die);
22319 }
22320 if (next_arg)
22321 *next_arg = arg;
22322 return parm_pack_die;
22323 }
22324
22325 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22326 at the end of an (ANSI prototyped) formal parameter list. */
22327
22328 static void
22329 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22330 {
22331 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22332 }
22333
22334 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22335 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22336 parameters as specified in some function type specification (except for
22337 those which appear as part of a function *definition*). */
22338
22339 static void
22340 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22341 {
22342 tree link;
22343 tree formal_type = NULL;
22344 tree first_parm_type;
22345 tree arg;
22346
22347 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22348 {
22349 arg = DECL_ARGUMENTS (function_or_method_type);
22350 function_or_method_type = TREE_TYPE (function_or_method_type);
22351 }
22352 else
22353 arg = NULL_TREE;
22354
22355 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22356
22357 /* Make our first pass over the list of formal parameter types and output a
22358 DW_TAG_formal_parameter DIE for each one. */
22359 for (link = first_parm_type; link; )
22360 {
22361 dw_die_ref parm_die;
22362
22363 formal_type = TREE_VALUE (link);
22364 if (formal_type == void_type_node)
22365 break;
22366
22367 /* Output a (nameless) DIE to represent the formal parameter itself. */
22368 parm_die = gen_formal_parameter_die (formal_type, NULL,
22369 true /* Emit name attribute. */,
22370 context_die);
22371 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22372 && link == first_parm_type)
22373 {
22374 add_AT_flag (parm_die, DW_AT_artificial, 1);
22375 if (dwarf_version >= 3 || !dwarf_strict)
22376 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22377 }
22378 else if (arg && DECL_ARTIFICIAL (arg))
22379 add_AT_flag (parm_die, DW_AT_artificial, 1);
22380
22381 link = TREE_CHAIN (link);
22382 if (arg)
22383 arg = DECL_CHAIN (arg);
22384 }
22385
22386 /* If this function type has an ellipsis, add a
22387 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22388 if (formal_type != void_type_node)
22389 gen_unspecified_parameters_die (function_or_method_type, context_die);
22390
22391 /* Make our second (and final) pass over the list of formal parameter types
22392 and output DIEs to represent those types (as necessary). */
22393 for (link = TYPE_ARG_TYPES (function_or_method_type);
22394 link && TREE_VALUE (link);
22395 link = TREE_CHAIN (link))
22396 gen_type_die (TREE_VALUE (link), context_die);
22397 }
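
/* Rough example of the two passes above: for the C declaration

       int f (int, double, ...);

   the DW_TAG_subroutine_type (or subprogram declaration) DIE gets two
   nameless DW_TAG_formal_parameter children referring to the DIEs for
   `int' and `double', followed by a DW_TAG_unspecified_parameters DIE
   for the ellipsis; the second pass then makes sure the `int' and
   `double' type DIEs themselves exist.  For a C++ member function the
   implicit `this' parameter is additionally marked DW_AT_artificial
   and referenced via DW_AT_object_pointer.  */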
22398
22399 /* We want to generate the DIE for TYPE so that we can generate the
22400 die for MEMBER, which has been defined; we will need to refer back
22401 to the member declaration nested within TYPE. If we're trying to
22402 generate minimal debug info for TYPE, processing TYPE won't do the
22403 trick; we need to attach the member declaration by hand. */
22404
22405 static void
22406 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22407 {
22408 gen_type_die (type, context_die);
22409
22410 /* If we're trying to avoid duplicate debug info, we may not have
22411 emitted the member decl for this function. Emit it now. */
22412 if (TYPE_STUB_DECL (type)
22413 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22414 && ! lookup_decl_die (member))
22415 {
22416 dw_die_ref type_die;
22417 gcc_assert (!decl_ultimate_origin (member));
22418
22419 type_die = lookup_type_die_strip_naming_typedef (type);
22420 if (TREE_CODE (member) == FUNCTION_DECL)
22421 gen_subprogram_die (member, type_die);
22422 else if (TREE_CODE (member) == FIELD_DECL)
22423 {
22424 /* Ignore the nameless fields that are used to skip bits but handle
22425 C++ anonymous unions and structs. */
22426 if (DECL_NAME (member) != NULL_TREE
22427 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22428 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22429 {
22430 struct vlr_context vlr_ctx = {
22431 DECL_CONTEXT (member), /* struct_type */
22432 NULL_TREE /* variant_part_offset */
22433 };
22434 gen_type_die (member_declared_type (member), type_die);
22435 gen_field_die (member, &vlr_ctx, type_die);
22436 }
22437 }
22438 else
22439 gen_variable_die (member, NULL_TREE, type_die);
22440 }
22441 }
22442 \f
22443 /* Forward declare these functions, because they are mutually recursive
22444 with their set_block_* pairing functions. */
22445 static void set_decl_origin_self (tree);
22446
22447 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22448 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22449 that it points to the node itself, thus indicating that the node is its
22450 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22451 the given node is NULL, recursively descend the decl/block tree which
22452 it is the root of, and for each other ..._DECL or BLOCK node contained
22453 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22454 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22455 values to point to themselves. */
22456
22457 static void
22458 set_block_origin_self (tree stmt)
22459 {
22460 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22461 {
22462 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22463
22464 {
22465 tree local_decl;
22466
22467 for (local_decl = BLOCK_VARS (stmt);
22468 local_decl != NULL_TREE;
22469 local_decl = DECL_CHAIN (local_decl))
22470 /* Do not recurse on nested functions since the inlining status
22471 of parent and child can be different as per the DWARF spec. */
22472 if (TREE_CODE (local_decl) != FUNCTION_DECL
22473 && !DECL_EXTERNAL (local_decl))
22474 set_decl_origin_self (local_decl);
22475 }
22476
22477 {
22478 tree subblock;
22479
22480 for (subblock = BLOCK_SUBBLOCKS (stmt);
22481 subblock != NULL_TREE;
22482 subblock = BLOCK_CHAIN (subblock))
22483 set_block_origin_self (subblock); /* Recurse. */
22484 }
22485 }
22486 }
22487
22488 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22489 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22490 node so that it points to the node itself, thus indicating that the
22491 node represents its own (abstract) origin. Additionally, if the
22492 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22493 the decl/block tree of which the given node is the root, and for
22494 each other ..._DECL or BLOCK node contained therein whose
22495 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22496 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22497 point to themselves. */
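/* For example (an illustrative sketch only): given the abstract instance of

     static inline int inc (int i) { return i + 1; }

   calling set_decl_origin_self on the FUNCTION_DECL leaves the
   DECL_ABSTRACT_ORIGIN of INC and of its parameter I, and the
   BLOCK_ABSTRACT_ORIGIN of the outermost BLOCK in DECL_INITIAL (INC),
   all pointing back at their own nodes, i.e. each is recorded as its
   own abstract origin.  */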
22498
22499 static void
22500 set_decl_origin_self (tree decl)
22501 {
22502 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22503 {
22504 DECL_ABSTRACT_ORIGIN (decl) = decl;
22505 if (TREE_CODE (decl) == FUNCTION_DECL)
22506 {
22507 tree arg;
22508
22509 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22510 DECL_ABSTRACT_ORIGIN (arg) = arg;
22511 if (DECL_INITIAL (decl) != NULL_TREE
22512 && DECL_INITIAL (decl) != error_mark_node)
22513 set_block_origin_self (DECL_INITIAL (decl));
22514 }
22515 }
22516 }
22517 \f
22518 /* Mark the early DIE for DECL as the abstract instance. */
22519
22520 static void
22521 dwarf2out_abstract_function (tree decl)
22522 {
22523 dw_die_ref old_die;
22524
22525 /* Make sure we have the actual abstract inline, not a clone. */
22526 decl = DECL_ORIGIN (decl);
22527
22528 if (DECL_IGNORED_P (decl))
22529 return;
22530
22531 /* In LTO we're all set. We already created abstract instances
22532 early and we want to avoid creating a concrete instance of that
22533 if we don't output it. */
22534 if (in_lto_p)
22535 return;
22536
22537 old_die = lookup_decl_die (decl);
22538 gcc_assert (old_die != NULL);
22539 if (get_AT (old_die, DW_AT_inline))
22540 /* We've already generated the abstract instance. */
22541 return;
22542
22543 /* Go ahead and put DW_AT_inline on the DIE. */
22544 if (DECL_DECLARED_INLINE_P (decl))
22545 {
22546 if (cgraph_function_possibly_inlined_p (decl))
22547 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22548 else
22549 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22550 }
22551 else
22552 {
22553 if (cgraph_function_possibly_inlined_p (decl))
22554 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22555 else
22556 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22557 }
22558
22559 if (DECL_DECLARED_INLINE_P (decl)
22560 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22561 add_AT_flag (old_die, DW_AT_artificial, 1);
22562
22563 set_decl_origin_self (decl);
22564 }
22565
22566 /* Helper function of premark_used_types() which gets called through
22567 htab_traverse.
22568
22569 Marks the DIE of the given TYPE as perennial, so it never gets
22570 marked as unused by prune_unused_types. */
22571
22572 bool
22573 premark_used_types_helper (tree const &type, void *)
22574 {
22575 dw_die_ref die;
22576
22577 die = lookup_type_die (type);
22578 if (die != NULL)
22579 die->die_perennial_p = 1;
22580 return true;
22581 }
22582
22583 /* Helper function of premark_types_used_by_global_vars which gets called
22584 through htab_traverse.
22585
22586 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22587 marked as unused by prune_unused_types. The DIE of the type is marked
22588 only if the global variable using the type will actually be emitted. */
22589
22590 int
22591 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22592 void *)
22593 {
22594 struct types_used_by_vars_entry *entry;
22595 dw_die_ref die;
22596
22597 entry = (struct types_used_by_vars_entry *) *slot;
22598 gcc_assert (entry->type != NULL
22599 && entry->var_decl != NULL);
22600 die = lookup_type_die (entry->type);
22601 if (die)
22602 {
22603 /* Ask cgraph if the global variable really is to be emitted.
22604 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22605 varpool_node *node = varpool_node::get (entry->var_decl);
22606 if (node && node->definition)
22607 {
22608 die->die_perennial_p = 1;
22609 /* Keep the parent DIEs as well. */
22610 while ((die = die->die_parent) && die->die_perennial_p == 0)
22611 die->die_perennial_p = 1;
22612 }
22613 }
22614 return 1;
22615 }
22616
22617 /* Mark all members of used_types_hash as perennial. */
22618
22619 static void
22620 premark_used_types (struct function *fun)
22621 {
22622 if (fun && fun->used_types_hash)
22623 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22624 }
22625
22626 /* Mark all members of types_used_by_vars_entry as perennial. */
22627
22628 static void
22629 premark_types_used_by_global_vars (void)
22630 {
22631 if (types_used_by_vars_hash)
22632 types_used_by_vars_hash
22633 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22634 }
22635
22636 /* Mark all variables used by the symtab as perennial. */
22637
22638 static void
22639 premark_used_variables (void)
22640 {
22641 /* Mark DIEs in the symtab as used. */
22642 varpool_node *var;
22643 FOR_EACH_VARIABLE (var)
22644 {
22645 dw_die_ref die = lookup_decl_die (var->decl);
22646 if (die)
22647 die->die_perennial_p = 1;
22648 }
22649 }
22650
22651 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22652 for CA_LOC call arg loc node. */
22653
22654 static dw_die_ref
22655 gen_call_site_die (tree decl, dw_die_ref subr_die,
22656 struct call_arg_loc_node *ca_loc)
22657 {
22658 dw_die_ref stmt_die = NULL, die;
22659 tree block = ca_loc->block;
22660
22661 while (block
22662 && block != DECL_INITIAL (decl)
22663 && TREE_CODE (block) == BLOCK)
22664 {
22665 stmt_die = lookup_block_die (block);
22666 if (stmt_die)
22667 break;
22668 block = BLOCK_SUPERCONTEXT (block);
22669 }
22670 if (stmt_die == NULL)
22671 stmt_die = subr_die;
22672 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22673 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22674 if (ca_loc->tail_call_p)
22675 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22676 if (ca_loc->symbol_ref)
22677 {
22678 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22679 if (tdie)
22680 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22681 else
22682 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22683 false);
22684 }
22685 return die;
22686 }
22687
22688 /* Generate a DIE to represent a declared function (either file-scope or
22689 block-local). */
22690
22691 static void
22692 gen_subprogram_die (tree decl, dw_die_ref context_die)
22693 {
22694 tree origin = decl_ultimate_origin (decl);
22695 dw_die_ref subr_die;
22696 dw_die_ref old_die = lookup_decl_die (decl);
22697
22698 /* This function gets called multiple times for different stages of
22699 the debug process. For example, for func() in this code:
22700
22701 namespace S
22702 {
22703 void func() { ... }
22704 }
22705
22706 ...we get called 4 times. Twice in early debug and twice in
22707 late debug:
22708
22709 Early debug
22710 -----------
22711
22712 1. Once while generating func() within the namespace. This is
22713 the declaration. The declaration bit below is set, as the
22714 context is the namespace.
22715
22716 A new DIE will be generated with DW_AT_declaration set.
22717
22718 2. Once for func() itself. This is the specification. The
22719 declaration bit below is clear as the context is the CU.
22720
22721 We will use the cached DIE from (1) to create a new DIE with
22722 DW_AT_specification pointing to the declaration in (1).
22723
22724 Late debug via rest_of_handle_final()
22725 -------------------------------------
22726
22727 3. Once while generating func() within the namespace. This is also the
22728 declaration, as in (1), but this time we will early exit below
22729 as we have a cached DIE and a declaration needs no additional
22730 annotations (no locations), as the source declaration line
22731 info is enough.
22732
22733 4. Once for func() itself. As in (2), this is the specification,
22734 but this time we will re-use the cached DIE, and just annotate
22735 it with the location information that should now be available.
22736
22737 For something without namespaces, but with abstract instances, we
22738 are also called multiple times:
22739
22740 class Base
22741 {
22742 public:
22743 Base (); // constructor declaration (1)
22744 };
22745
22746 Base::Base () { } // constructor specification (2)
22747
22748 Early debug
22749 -----------
22750
22751 1. Once for the Base() constructor by virtue of it being a
22752 member of the Base class. This is done via
22753 rest_of_type_compilation.
22754
22755 This is a declaration, so a new DIE will be created with
22756 DW_AT_declaration.
22757
22758 2. Once for the Base() constructor definition, but this time
22759 while generating the abstract instance of the base
22760 constructor (__base_ctor) which is being generated via early
22761 debug of reachable functions.
22762
22763 Even though we have a cached version of the declaration (1),
22764 we will create a DW_AT_specification of the declaration DIE
22765 in (1).
22766
22767 3. Once for the __base_ctor itself, but this time, we generate
22768 a DW_AT_abstract_origin version of the DW_AT_specification in
22769 (2).
22770
22771 Late debug via rest_of_handle_final
22772 -----------------------------------
22773
22774 4. One final time for the __base_ctor (which will have a cached
22775 DIE with DW_AT_abstract_origin created in (3)). This time,
22776 we will just annotate the location information now
22777 available.
22778 */
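/* As an illustrative sketch only (exact attributes vary with the options
   in effect), the namespace example above ends up with DIEs shaped
   roughly like:

     DW_TAG_compile_unit
       DW_TAG_namespace "S"
         DW_TAG_subprogram "func"        <- (1)/(3): has DW_AT_declaration
       DW_TAG_subprogram                 <- (2)/(4)
         DW_AT_specification -> the declaration DIE above
         DW_AT_low_pc / DW_AT_high_pc    <- filled in during late debug  */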
22779 int declaration = (current_function_decl != decl
22780 || class_or_namespace_scope_p (context_die));
22781
22782 /* A declaration that has been previously dumped needs no
22783 additional information. */
22784 if (old_die && declaration)
22785 return;
22786
22787 /* Now that the C++ front end lazily declares artificial member fns, we
22788 might need to retrofit the declaration into its class. */
22789 if (!declaration && !origin && !old_die
22790 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22791 && !class_or_namespace_scope_p (context_die)
22792 && debug_info_level > DINFO_LEVEL_TERSE)
22793 old_die = force_decl_die (decl);
22794
22795 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22796 if (origin != NULL)
22797 {
22798 gcc_assert (!declaration || local_scope_p (context_die));
22799
22800 /* Fixup die_parent for the abstract instance of a nested
22801 inline function. */
22802 if (old_die && old_die->die_parent == NULL)
22803 add_child_die (context_die, old_die);
22804
22805 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22806 {
22807 /* If we have a DW_AT_abstract_origin we have a working
22808 cached version. */
22809 subr_die = old_die;
22810 }
22811 else
22812 {
22813 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22814 add_abstract_origin_attribute (subr_die, origin);
22815 /* This is where the actual code for a cloned function is.
22816 Let's emit the linkage name attribute for it. This helps
22817 debuggers to, e.g., set breakpoints in
22818 constructors/destructors when the user asks "break
22819 K::K". */
22820 add_linkage_name (subr_die, decl);
22821 }
22822 }
22823 /* A cached copy, possibly from early dwarf generation. Reuse as
22824 much as possible. */
22825 else if (old_die)
22826 {
22827 if (!get_AT_flag (old_die, DW_AT_declaration)
22828 /* We can have a normal definition following an inline one in the
22829 case of redefinition of GNU C extern inlines.
22830 It seems reasonable to use AT_specification in this case. */
22831 && !get_AT (old_die, DW_AT_inline))
22832 {
22833 /* Detect and ignore this case, where we are trying to output
22834 something we have already output. */
22835 if (get_AT (old_die, DW_AT_low_pc)
22836 || get_AT (old_die, DW_AT_ranges))
22837 return;
22838
22839 /* If we have no location information, this must be a
22840 partially generated DIE from early dwarf generation.
22841 Fall through and generate it. */
22842 }
22843
22844 /* If the definition comes from the same place as the declaration,
22845 maybe use the old DIE. We always want the DIE for this function
22846 that has the *_pc attributes to be under comp_unit_die so the
22847 debugger can find it. We also need to do this for abstract
22848 instances of inlines, since the spec requires the out-of-line copy
22849 to have the same parent. For local class methods, this doesn't
22850 apply; we just use the old DIE. */
22851 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22852 struct dwarf_file_data * file_index = lookup_filename (s.file);
22853 if (((is_unit_die (old_die->die_parent)
22854 /* This condition fixes the inconsistency/ICE with the
22855 following Fortran test (or some derivative thereof) while
22856 building libgfortran:
22857
22858 module some_m
22859 contains
22860 logical function funky (FLAG)
22861 funky = .true.
22862 end function
22863 end module
22864 */
22865 || (old_die->die_parent
22866 && old_die->die_parent->die_tag == DW_TAG_module)
22867 || local_scope_p (old_die->die_parent)
22868 || context_die == NULL)
22869 && (DECL_ARTIFICIAL (decl)
22870 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22871 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22872 == (unsigned) s.line)
22873 && (!debug_column_info
22874 || s.column == 0
22875 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22876 == (unsigned) s.column)))))
22877 /* With LTO if there's an abstract instance for
22878 the old DIE, this is a concrete instance and
22879 thus re-use the DIE. */
22880 || get_AT (old_die, DW_AT_abstract_origin))
22881 {
22882 subr_die = old_die;
22883
22884 /* Clear out the declaration attribute, but leave the
22885 parameters so they can be augmented with location
22886 information later. Unless this was a declaration, in
22887 which case, wipe out the nameless parameters and recreate
22888 them further down. */
22889 if (remove_AT (subr_die, DW_AT_declaration))
22890 {
22891
22892 remove_AT (subr_die, DW_AT_object_pointer);
22893 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22894 }
22895 }
22896 /* Make a specification pointing to the previously built
22897 declaration. */
22898 else
22899 {
22900 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22901 add_AT_specification (subr_die, old_die);
22902 add_pubname (decl, subr_die);
22903 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22904 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22905 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22906 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22907 if (debug_column_info
22908 && s.column
22909 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22910 != (unsigned) s.column))
22911 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22912
22913 /* If the prototype had an 'auto' or 'decltype(auto)' in
22914 the return type, emit the real type on the definition die. */
22915 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22916 {
22917 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22918 while (die
22919 && (die->die_tag == DW_TAG_reference_type
22920 || die->die_tag == DW_TAG_rvalue_reference_type
22921 || die->die_tag == DW_TAG_pointer_type
22922 || die->die_tag == DW_TAG_const_type
22923 || die->die_tag == DW_TAG_volatile_type
22924 || die->die_tag == DW_TAG_restrict_type
22925 || die->die_tag == DW_TAG_array_type
22926 || die->die_tag == DW_TAG_ptr_to_member_type
22927 || die->die_tag == DW_TAG_subroutine_type))
22928 die = get_AT_ref (die, DW_AT_type);
22929 if (die == auto_die || die == decltype_auto_die)
22930 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22931 TYPE_UNQUALIFIED, false, context_die);
22932 }
22933
22934 /* When we process the method declaration, we haven't seen
22935 the out-of-class defaulted definition yet, so we have to
22936 recheck now. */
22937 if ((dwarf_version >= 5 || ! dwarf_strict)
22938 && !get_AT (subr_die, DW_AT_defaulted))
22939 {
22940 int defaulted
22941 = lang_hooks.decls.decl_dwarf_attribute (decl,
22942 DW_AT_defaulted);
22943 if (defaulted != -1)
22944 {
22945 /* Other values must have been handled before. */
22946 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22947 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22948 }
22949 }
22950 }
22951 }
22952 /* Create a fresh DIE for anything else. */
22953 else
22954 {
22955 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22956
22957 if (TREE_PUBLIC (decl))
22958 add_AT_flag (subr_die, DW_AT_external, 1);
22959
22960 add_name_and_src_coords_attributes (subr_die, decl);
22961 add_pubname (decl, subr_die);
22962 if (debug_info_level > DINFO_LEVEL_TERSE)
22963 {
22964 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22965 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22966 TYPE_UNQUALIFIED, false, context_die);
22967 }
22968
22969 add_pure_or_virtual_attribute (subr_die, decl);
22970 if (DECL_ARTIFICIAL (decl))
22971 add_AT_flag (subr_die, DW_AT_artificial, 1);
22972
22973 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22974 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22975
22976 add_alignment_attribute (subr_die, decl);
22977
22978 add_accessibility_attribute (subr_die, decl);
22979 }
22980
22981 /* Unless we have an existing non-declaration DIE, equate the new
22982 DIE. */
22983 if (!old_die || is_declaration_die (old_die))
22984 equate_decl_number_to_die (decl, subr_die);
22985
22986 if (declaration)
22987 {
22988 if (!old_die || !get_AT (old_die, DW_AT_inline))
22989 {
22990 add_AT_flag (subr_die, DW_AT_declaration, 1);
22991
22992 /* If this is an explicit function declaration then generate
22993 a DW_AT_explicit attribute. */
22994 if ((dwarf_version >= 3 || !dwarf_strict)
22995 && lang_hooks.decls.decl_dwarf_attribute (decl,
22996 DW_AT_explicit) == 1)
22997 add_AT_flag (subr_die, DW_AT_explicit, 1);
22998
22999 /* If this is a C++11 deleted special function member then generate
23000 a DW_AT_deleted attribute. */
23001 if ((dwarf_version >= 5 || !dwarf_strict)
23002 && lang_hooks.decls.decl_dwarf_attribute (decl,
23003 DW_AT_deleted) == 1)
23004 add_AT_flag (subr_die, DW_AT_deleted, 1);
23005
23006 /* If this is a C++11 defaulted special function member then
23007 generate a DW_AT_defaulted attribute. */
23008 if (dwarf_version >= 5 || !dwarf_strict)
23009 {
23010 int defaulted
23011 = lang_hooks.decls.decl_dwarf_attribute (decl,
23012 DW_AT_defaulted);
23013 if (defaulted != -1)
23014 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
23015 }
23016
23017 /* If this is a C++11 non-static member function with & ref-qualifier
23018 then generate a DW_AT_reference attribute. */
23019 if ((dwarf_version >= 5 || !dwarf_strict)
23020 && lang_hooks.decls.decl_dwarf_attribute (decl,
23021 DW_AT_reference) == 1)
23022 add_AT_flag (subr_die, DW_AT_reference, 1);
23023
23024 /* If this is a C++11 non-static member function with &&
23025 ref-qualifier then generate a DW_AT_reference attribute. */
23026 if ((dwarf_version >= 5 || !dwarf_strict)
23027 && lang_hooks.decls.decl_dwarf_attribute (decl,
23028 DW_AT_rvalue_reference)
23029 == 1)
23030 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
23031 }
23032 }
23033 /* For non DECL_EXTERNALs, if range information is available, fill
23034 the DIE with it. */
23035 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
23036 {
23037 HOST_WIDE_INT cfa_fb_offset;
23038
23039 struct function *fun = DECL_STRUCT_FUNCTION (decl);
23040
23041 if (!crtl->has_bb_partition)
23042 {
23043 dw_fde_ref fde = fun->fde;
23044 if (fde->dw_fde_begin)
23045 {
23046 /* We have already generated the labels. */
23047 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23048 fde->dw_fde_end, false);
23049 }
23050 else
23051 {
23052 /* Create start/end labels and add the range. */
23053 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
23054 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
23055 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
23056 current_function_funcdef_no);
23057 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
23058 current_function_funcdef_no);
23059 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
23060 false);
23061 }
23062
23063 #if VMS_DEBUGGING_INFO
23064 /* HP OpenVMS Industry Standard 64: DWARF Extensions
23065 Section 2.3 Prologue and Epilogue Attributes:
23066 When a breakpoint is set on entry to a function, it is generally
23067 desirable for execution to be suspended, not on the very first
23068 instruction of the function, but rather at a point after the
23069 function's frame has been set up, after any language defined local
23070 declaration processing has been completed, and before execution of
23071 the first statement of the function begins. Debuggers generally
23072 cannot properly determine where this point is. Similarly for a
23073 breakpoint set on exit from a function. The prologue and epilogue
23074 attributes allow a compiler to communicate the location(s) to use. */
23075
23076 {
23077 if (fde->dw_fde_vms_end_prologue)
23078 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
23079 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
23080
23081 if (fde->dw_fde_vms_begin_epilogue)
23082 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
23083 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
23084 }
23085 #endif
23086
23087 }
23088 else
23089 {
23090 /* Generate pubnames entries for the split function code ranges. */
23091 dw_fde_ref fde = fun->fde;
23092
23093 if (fde->dw_fde_second_begin)
23094 {
23095 if (dwarf_version >= 3 || !dwarf_strict)
23096 {
23097 /* We should use ranges for non-contiguous code section
23098 addresses. Use the actual code range for the initial
23099 section, since the HOT/COLD labels might precede an
23100 alignment offset. */
23101 bool range_list_added = false;
23102 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
23103 fde->dw_fde_end, &range_list_added,
23104 false);
23105 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
23106 fde->dw_fde_second_end,
23107 &range_list_added, false);
23108 if (range_list_added)
23109 add_ranges (NULL);
23110 }
23111 else
23112 {
23113 /* There is no real support in DW2 for this, so we make
23114 a work-around. First, emit the pub name for the segment
23115 containing the function label. Then make and emit a
23116 simplified subprogram DIE for the second segment with the
23117 name prefixed by __second_sect_of_. We use the same
23118 linkage name for the second die so that gdb will find both
23119 sections when given "b foo". */
23120 const char *name = NULL;
23121 tree decl_name = DECL_NAME (decl);
23122 dw_die_ref seg_die;
23123
23124 /* Do the 'primary' section. */
23125 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23126 fde->dw_fde_end, false);
23127
23128 /* Build a minimal DIE for the secondary section. */
23129 seg_die = new_die (DW_TAG_subprogram,
23130 subr_die->die_parent, decl);
23131
23132 if (TREE_PUBLIC (decl))
23133 add_AT_flag (seg_die, DW_AT_external, 1);
23134
23135 if (decl_name != NULL
23136 && IDENTIFIER_POINTER (decl_name) != NULL)
23137 {
23138 name = dwarf2_name (decl, 1);
23139 if (! DECL_ARTIFICIAL (decl))
23140 add_src_coords_attributes (seg_die, decl);
23141
23142 add_linkage_name (seg_die, decl);
23143 }
23144 gcc_assert (name != NULL);
23145 add_pure_or_virtual_attribute (seg_die, decl);
23146 if (DECL_ARTIFICIAL (decl))
23147 add_AT_flag (seg_die, DW_AT_artificial, 1);
23148
23149 name = concat ("__second_sect_of_", name, NULL);
23150 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23151 fde->dw_fde_second_end, false);
23152 add_name_attribute (seg_die, name);
23153 if (want_pubnames ())
23154 add_pubname_string (name, seg_die);
23155 }
23156 }
23157 else
23158 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23159 false);
23160 }
23161
23162 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23163
23164 /* We define the "frame base" as the function's CFA. This is more
23165 convenient for several reasons: (1) It's stable across the prologue
23166 and epilogue, which makes it better than just a frame pointer,
23167 (2) With dwarf3, there exists a one-byte encoding that allows us
23168 to reference the .debug_frame data by proxy, but failing that,
23169 (3) We can at least reuse the code inspection and interpretation
23170 code that determines the CFA position at various points in the
23171 function. */
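/* Illustration only: with DWARF 3 or later and DWARF2-style unwind info
   this reduces to the one-byte form mentioned in (2),

     DW_AT_frame_base: DW_OP_call_frame_cfa

   while otherwise convert_cfa_to_fb_loc_list below builds a location
   list (or a single expression) describing how the CFA is computed at
   the various points in the function.  */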
23172 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23173 {
23174 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23175 add_AT_loc (subr_die, DW_AT_frame_base, op);
23176 }
23177 else
23178 {
23179 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23180 if (list->dw_loc_next)
23181 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23182 else
23183 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23184 }
23185
23186 /* Compute a displacement from the "steady-state frame pointer" to
23187 the CFA. The former is what all stack slots and argument slots
23188 will reference in the rtl; the latter is what we've told the
23189 debugger about. We'll need to adjust all frame_base references
23190 by this displacement. */
23191 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23192
23193 if (fun->static_chain_decl)
23194 {
23195 /* DWARF requires here a location expression that computes the
23196 address of the enclosing subprogram's frame base. The machinery
23197 in tree-nested.c is supposed to store this specific address in the
23198 last field of the FRAME record. */
23199 const tree frame_type
23200 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23201 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23202
23203 tree fb_expr
23204 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23205 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23206 fb_expr, fb_decl, NULL_TREE);
23207
23208 add_AT_location_description (subr_die, DW_AT_static_link,
23209 loc_list_from_tree (fb_expr, 0, NULL));
23210 }
23211
23212 resolve_variable_values ();
23213 }
23214
23215 /* Generate child DIEs for template parameters. */
23216 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23217 gen_generic_params_dies (decl);
23218
23219 /* Now output descriptions of the arguments for this function. This gets
23220 (unnecessarily?) complex because the DECL_ARGUMENTS list
23221 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23222 `...' at the end of the formal parameter list. In order to find out if
23223 there was a trailing ellipsis or not, we must instead look at the type
23224 associated with the FUNCTION_DECL. This will be a node of type
23225 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23226 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23227 an ellipsis at the end. */
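/* Concretely (illustrative only):

     int f (int, double);   TYPE_ARG_TYPES: int -> double -> void_type_node
     int g (int, ...);      TYPE_ARG_TYPES: int            (no trailing void)

   so a parameter-type chain that does not end in void_type_node is what
   signals the trailing ellipsis handled below.  */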
23228
23229 /* In the case where we are describing a mere function declaration, all we
23230 need to do here (and all we *can* do here) is to describe the *types* of
23231 its formal parameters. */
23232 if (debug_info_level <= DINFO_LEVEL_TERSE)
23233 ;
23234 else if (declaration)
23235 gen_formal_types_die (decl, subr_die);
23236 else
23237 {
23238 /* Generate DIEs to represent all known formal parameters. */
23239 tree parm = DECL_ARGUMENTS (decl);
23240 tree generic_decl = early_dwarf
23241 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23242 tree generic_decl_parm = generic_decl
23243 ? DECL_ARGUMENTS (generic_decl)
23244 : NULL;
23245
23246 /* Now we want to walk the list of parameters of the function and
23247 emit their relevant DIEs.
23248
23249 We consider the case of DECL being an instance of a generic function
23250 as well as it being a normal function.
23251
23252 If DECL is an instance of a generic function we walk the
23253 parameters of the generic function declaration _and_ the parameters of
23254 DECL itself. This is useful because we want to emit specific DIEs for
23255 function parameter packs and those are declared as part of the
23256 generic function declaration. In that particular case,
23257 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23258 That DIE has children DIEs representing the set of arguments
23259 of the pack. Note that the set of pack arguments can be empty.
23260 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23261 child DIEs.
23262
23263 Otherwise, we just consider the parameters of DECL. */
23264 while (generic_decl_parm || parm)
23265 {
23266 if (generic_decl_parm
23267 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23268 gen_formal_parameter_pack_die (generic_decl_parm,
23269 parm, subr_die,
23270 &parm);
23271 else if (parm)
23272 {
23273 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23274
23275 if (early_dwarf
23276 && parm == DECL_ARGUMENTS (decl)
23277 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23278 && parm_die
23279 && (dwarf_version >= 3 || !dwarf_strict))
23280 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23281
23282 parm = DECL_CHAIN (parm);
23283 }
23284
23285 if (generic_decl_parm)
23286 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23287 }
23288
23289 /* Decide whether we need an unspecified_parameters DIE at the end.
23290 There are 2 more cases to do this for: 1) the ANSI `...' declaration -
23291 this is detectable when the end of the arg list is not a
23292 void_type_node; 2) an unprototyped function declaration (not a
23293 definition). This just means that we have no info about the
23294 parameters at all. */
23295 if (early_dwarf)
23296 {
23297 if (prototype_p (TREE_TYPE (decl)))
23298 {
23299 /* This is the prototyped case, check for a trailing ellipsis. */
23300 if (stdarg_p (TREE_TYPE (decl)))
23301 gen_unspecified_parameters_die (decl, subr_die);
23302 }
23303 else if (DECL_INITIAL (decl) == NULL_TREE)
23304 gen_unspecified_parameters_die (decl, subr_die);
23305 }
23306 }
23307
23308 if (subr_die != old_die)
23309 /* Add the calling convention attribute if requested. */
23310 add_calling_convention_attribute (subr_die, decl);
23311
23312 /* Output Dwarf info for all of the stuff within the body of the function
23313 (if it has one - it may be just a declaration).
23314
23315 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23316 a function. This BLOCK actually represents the outermost binding contour
23317 for the function, i.e. the contour in which the function's formal
23318 parameters and labels get declared. Curiously, it appears that the front
23319 end doesn't actually put the PARM_DECL nodes for the current function onto
23320 the BLOCK_VARS list for this outer scope; instead, they are strung off
23321 of the DECL_ARGUMENTS list for the function.
23322
23323 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23324 the LABEL_DECL nodes for the function however, and we output DWARF info
23325 for those in decls_for_scope. Just within the `outer_scope' there will be
23326 a BLOCK node representing the function's outermost pair of curly braces,
23327 and any blocks used for the base and member initializers of a C++
23328 constructor function. */
23329 tree outer_scope = DECL_INITIAL (decl);
23330 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23331 {
23332 int call_site_note_count = 0;
23333 int tail_call_site_note_count = 0;
23334
23335 /* Emit a DW_TAG_variable DIE for a named return value. */
23336 if (DECL_NAME (DECL_RESULT (decl)))
23337 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23338
23339 /* The first time through decls_for_scope we will generate the
23340 DIEs for the locals. The second time, we fill in the
23341 location info. */
23342 decls_for_scope (outer_scope, subr_die);
23343
23344 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23345 {
23346 struct call_arg_loc_node *ca_loc;
23347 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23348 {
23349 dw_die_ref die = NULL;
23350 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23351 rtx arg, next_arg;
23352 tree arg_decl = NULL_TREE;
23353
23354 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23355 ? XEXP (ca_loc->call_arg_loc_note, 0)
23356 : NULL_RTX);
23357 arg; arg = next_arg)
23358 {
23359 dw_loc_descr_ref reg, val;
23360 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23361 dw_die_ref cdie, tdie = NULL;
23362
23363 next_arg = XEXP (arg, 1);
23364 if (REG_P (XEXP (XEXP (arg, 0), 0))
23365 && next_arg
23366 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23367 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23368 && REGNO (XEXP (XEXP (arg, 0), 0))
23369 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23370 next_arg = XEXP (next_arg, 1);
23371 if (mode == VOIDmode)
23372 {
23373 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23374 if (mode == VOIDmode)
23375 mode = GET_MODE (XEXP (arg, 0));
23376 }
23377 if (mode == VOIDmode || mode == BLKmode)
23378 continue;
23379 /* Get dynamic information about call target only if we
23380 have no static information: we cannot generate both
23381 DW_AT_call_origin and DW_AT_call_target
23382 attributes. */
23383 if (ca_loc->symbol_ref == NULL_RTX)
23384 {
23385 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23386 {
23387 tloc = XEXP (XEXP (arg, 0), 1);
23388 continue;
23389 }
23390 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23391 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23392 {
23393 tlocc = XEXP (XEXP (arg, 0), 1);
23394 continue;
23395 }
23396 }
23397 reg = NULL;
23398 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23399 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23400 VAR_INIT_STATUS_INITIALIZED);
23401 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23402 {
23403 rtx mem = XEXP (XEXP (arg, 0), 0);
23404 reg = mem_loc_descriptor (XEXP (mem, 0),
23405 get_address_mode (mem),
23406 GET_MODE (mem),
23407 VAR_INIT_STATUS_INITIALIZED);
23408 }
23409 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23410 == DEBUG_PARAMETER_REF)
23411 {
23412 tree tdecl
23413 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23414 tdie = lookup_decl_die (tdecl);
23415 if (tdie == NULL)
23416 continue;
23417 arg_decl = tdecl;
23418 }
23419 else
23420 continue;
23421 if (reg == NULL
23422 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23423 != DEBUG_PARAMETER_REF)
23424 continue;
23425 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23426 VOIDmode,
23427 VAR_INIT_STATUS_INITIALIZED);
23428 if (val == NULL)
23429 continue;
23430 if (die == NULL)
23431 die = gen_call_site_die (decl, subr_die, ca_loc);
23432 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23433 NULL_TREE);
23434 add_desc_attribute (cdie, arg_decl);
23435 if (reg != NULL)
23436 add_AT_loc (cdie, DW_AT_location, reg);
23437 else if (tdie != NULL)
23438 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23439 tdie);
23440 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23441 if (next_arg != XEXP (arg, 1))
23442 {
23443 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23444 if (mode == VOIDmode)
23445 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23446 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23447 0), 1),
23448 mode, VOIDmode,
23449 VAR_INIT_STATUS_INITIALIZED);
23450 if (val != NULL)
23451 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23452 val);
23453 }
23454 }
23455 if (die == NULL
23456 && (ca_loc->symbol_ref || tloc))
23457 die = gen_call_site_die (decl, subr_die, ca_loc);
23458 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23459 {
23460 dw_loc_descr_ref tval = NULL;
23461
23462 if (tloc != NULL_RTX)
23463 tval = mem_loc_descriptor (tloc,
23464 GET_MODE (tloc) == VOIDmode
23465 ? Pmode : GET_MODE (tloc),
23466 VOIDmode,
23467 VAR_INIT_STATUS_INITIALIZED);
23468 if (tval)
23469 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23470 else if (tlocc != NULL_RTX)
23471 {
23472 tval = mem_loc_descriptor (tlocc,
23473 GET_MODE (tlocc) == VOIDmode
23474 ? Pmode : GET_MODE (tlocc),
23475 VOIDmode,
23476 VAR_INIT_STATUS_INITIALIZED);
23477 if (tval)
23478 add_AT_loc (die,
23479 dwarf_AT (DW_AT_call_target_clobbered),
23480 tval);
23481 }
23482 }
23483 if (die != NULL)
23484 {
23485 call_site_note_count++;
23486 if (ca_loc->tail_call_p)
23487 tail_call_site_note_count++;
23488 }
23489 }
23490 }
23491 call_arg_locations = NULL;
23492 call_arg_loc_last = NULL;
23493 if (tail_call_site_count >= 0
23494 && tail_call_site_count == tail_call_site_note_count
23495 && (!dwarf_strict || dwarf_version >= 5))
23496 {
23497 if (call_site_count >= 0
23498 && call_site_count == call_site_note_count)
23499 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23500 else
23501 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23502 }
23503 call_site_count = -1;
23504 tail_call_site_count = -1;
23505 }
23506
23507 /* Mark used types after we have created DIEs for the function's scopes. */
23508 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23509 }
23510
23511 /* Returns a hash value for X (which really is a die_struct). */
23512
23513 hashval_t
23514 block_die_hasher::hash (die_struct *d)
23515 {
23516 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23517 }
23518
23519 /* Return nonzero if the decl_id and die_parent of die_struct X are the same
23520 as those of die_struct Y. */
23521
23522 bool
23523 block_die_hasher::equal (die_struct *x, die_struct *y)
23524 {
23525 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23526 }
23527
23528 /* Hold information about markers for inlined entry points. */
23529 struct GTY ((for_user)) inline_entry_data
23530 {
23531 /* The block that's the inlined_function_outer_scope for an inlined
23532 function. */
23533 tree block;
23534
23535 /* The label at the inlined entry point. */
23536 const char *label_pfx;
23537 unsigned int label_num;
23538
23539 /* The view number to be used as the inlined entry point. */
23540 var_loc_view view;
23541 };
23542
23543 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23544 {
23545 typedef tree compare_type;
23546 static inline hashval_t hash (const inline_entry_data *);
23547 static inline bool equal (const inline_entry_data *, const_tree);
23548 };
23549
23550 /* Hash table routines for inline_entry_data. */
23551
23552 inline hashval_t
23553 inline_entry_data_hasher::hash (const inline_entry_data *data)
23554 {
23555 return htab_hash_pointer (data->block);
23556 }
23557
23558 inline bool
23559 inline_entry_data_hasher::equal (const inline_entry_data *data,
23560 const_tree block)
23561 {
23562 return data->block == block;
23563 }
23564
23565 /* Inlined entry points pending DIE creation in this compilation unit. */
23566
23567 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23568
23569
23570 /* Return TRUE if DECL, which may have been previously generated as
23571 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23572 true if decl (or its origin) is either an extern declaration or a
23573 class/namespace scoped declaration.
23574
23575 The declare_in_namespace support causes us to get two DIEs for one
23576 variable, both of which are declarations. We want to avoid
23577 considering one to be a specification, so we must test for
23578 DECLARATION and DW_AT_declaration. */
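/* For instance (illustrative only), with

     class X { static int member; };   // in-class declaration DIE
     int X::member;                    // out-of-class definition

   the definition is the case this predicate detects: its DIE gets a
   DW_AT_specification pointing back at the in-class declaration.  */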
23579 static inline bool
23580 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23581 {
23582 return (old_die && TREE_STATIC (decl) && !declaration
23583 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23584 }
23585
23586 /* Return true if DECL is a local static. */
23587
23588 static inline bool
23589 local_function_static (tree decl)
23590 {
23591 gcc_assert (VAR_P (decl));
23592 return TREE_STATIC (decl)
23593 && DECL_CONTEXT (decl)
23594 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23595 }
23596
23597 /* Return true iff DECL overrides (presumably completes) the type of
23598 OLD_DIE within CONTEXT_DIE. */
23599
23600 static bool
23601 override_type_for_decl_p (tree decl, dw_die_ref old_die,
23602 dw_die_ref context_die)
23603 {
23604 tree type = TREE_TYPE (decl);
23605 int cv_quals;
23606
23607 if (decl_by_reference_p (decl))
23608 {
23609 type = TREE_TYPE (type);
23610 cv_quals = TYPE_UNQUALIFIED;
23611 }
23612 else
23613 cv_quals = decl_quals (decl);
23614
23615 dw_die_ref type_die = modified_type_die (type,
23616 cv_quals | TYPE_QUALS (type),
23617 false,
23618 context_die);
23619
23620 dw_die_ref old_type_die = get_AT_ref (old_die, DW_AT_type);
23621
23622 return type_die != old_type_die;
23623 }
23624
23625 /* Generate a DIE to represent a declared data object.
23626 Either DECL or ORIGIN must be non-null. */
23627
23628 static void
23629 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23630 {
23631 HOST_WIDE_INT off = 0;
23632 tree com_decl;
23633 tree decl_or_origin = decl ? decl : origin;
23634 tree ultimate_origin;
23635 dw_die_ref var_die;
23636 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23637 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23638 || class_or_namespace_scope_p (context_die));
23639 bool specialization_p = false;
23640 bool no_linkage_name = false;
23641
23642 /* While C++ inline static data members have definitions inside of the
23643 class, force the first DIE to be a declaration, then let gen_member_die
23644 reparent it to the class context and call gen_variable_die again
23645 to create the outside of the class DIE for the definition. */
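/* E.g. (illustrative only), for

     struct X { static inline int counter = 0; };

   the DIE created on this first pass is forced to be a declaration
   inside X; the DIE for the definition outside the class comes from
   the later gen_variable_die call described above.  */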
23646 if (!declaration
23647 && old_die == NULL
23648 && decl
23649 && DECL_CONTEXT (decl)
23650 && TYPE_P (DECL_CONTEXT (decl))
23651 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23652 {
23653 declaration = true;
23654 if (dwarf_version < 5)
23655 no_linkage_name = true;
23656 }
23657
23658 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23659 if (decl || ultimate_origin)
23660 origin = ultimate_origin;
23661 com_decl = fortran_common (decl_or_origin, &off);
23662
23663 /* A symbol in a common block gets emitted as a child of the common block,
23664 in the form of a data member. */
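/* Illustrative sketch only: for the Fortran code

     COMMON /blk/ a, b

   this yields a DW_TAG_common_block DIE named "blk" whose children are
   DW_TAG_variable DIEs for A and B, each located at the common block's
   address plus the member offset OFF computed above.  */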
23665 if (com_decl)
23666 {
23667 dw_die_ref com_die;
23668 dw_loc_list_ref loc = NULL;
23669 die_node com_die_arg;
23670
23671 var_die = lookup_decl_die (decl_or_origin);
23672 if (var_die)
23673 {
23674 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23675 {
23676 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23677 if (loc)
23678 {
23679 if (off)
23680 {
23681 /* Optimize the common case. */
23682 if (single_element_loc_list_p (loc)
23683 && loc->expr->dw_loc_opc == DW_OP_addr
23684 && loc->expr->dw_loc_next == NULL
23685 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23686 == SYMBOL_REF)
23687 {
23688 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23689 loc->expr->dw_loc_oprnd1.v.val_addr
23690 = plus_constant (GET_MODE (x), x , off);
23691 }
23692 else
23693 loc_list_plus_const (loc, off);
23694 }
23695 add_AT_location_description (var_die, DW_AT_location, loc);
23696 remove_AT (var_die, DW_AT_declaration);
23697 }
23698 }
23699 return;
23700 }
23701
23702 if (common_block_die_table == NULL)
23703 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23704
23705 com_die_arg.decl_id = DECL_UID (com_decl);
23706 com_die_arg.die_parent = context_die;
23707 com_die = common_block_die_table->find (&com_die_arg);
23708 if (! early_dwarf)
23709 loc = loc_list_from_tree (com_decl, 2, NULL);
23710 if (com_die == NULL)
23711 {
23712 const char *cnam
23713 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23714 die_node **slot;
23715
23716 com_die = new_die (DW_TAG_common_block, context_die, decl);
23717 add_name_and_src_coords_attributes (com_die, com_decl);
23718 if (loc)
23719 {
23720 add_AT_location_description (com_die, DW_AT_location, loc);
23721 /* Avoid sharing the same loc descriptor between
23722 DW_TAG_common_block and DW_TAG_variable. */
23723 loc = loc_list_from_tree (com_decl, 2, NULL);
23724 }
23725 else if (DECL_EXTERNAL (decl_or_origin))
23726 add_AT_flag (com_die, DW_AT_declaration, 1);
23727 if (want_pubnames ())
23728 add_pubname_string (cnam, com_die); /* ??? needed? */
23729 com_die->decl_id = DECL_UID (com_decl);
23730 slot = common_block_die_table->find_slot (com_die, INSERT);
23731 *slot = com_die;
23732 }
23733 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23734 {
23735 add_AT_location_description (com_die, DW_AT_location, loc);
23736 loc = loc_list_from_tree (com_decl, 2, NULL);
23737 remove_AT (com_die, DW_AT_declaration);
23738 }
23739 var_die = new_die (DW_TAG_variable, com_die, decl);
23740 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23741 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23742 decl_quals (decl_or_origin), false,
23743 context_die);
23744 add_alignment_attribute (var_die, decl);
23745 add_AT_flag (var_die, DW_AT_external, 1);
23746 if (loc)
23747 {
23748 if (off)
23749 {
23750 /* Optimize the common case. */
23751 if (single_element_loc_list_p (loc)
23752 && loc->expr->dw_loc_opc == DW_OP_addr
23753 && loc->expr->dw_loc_next == NULL
23754 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23755 {
23756 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23757 loc->expr->dw_loc_oprnd1.v.val_addr
23758 = plus_constant (GET_MODE (x), x, off);
23759 }
23760 else
23761 loc_list_plus_const (loc, off);
23762 }
23763 add_AT_location_description (var_die, DW_AT_location, loc);
23764 }
23765 else if (DECL_EXTERNAL (decl_or_origin))
23766 add_AT_flag (var_die, DW_AT_declaration, 1);
23767 if (decl)
23768 equate_decl_number_to_die (decl, var_die);
23769 return;
23770 }
23771
23772 if (old_die)
23773 {
23774 if (declaration)
23775 {
23776 /* A declaration that has been previously dumped needs no
23777 further annotations, since it doesn't need location on
23778 the second pass. */
23779 return;
23780 }
23781 else if (decl_will_get_specification_p (old_die, decl, declaration)
23782 && !get_AT (old_die, DW_AT_specification))
23783 {
23784 /* Fall-thru so we can make a new variable die along with a
23785 DW_AT_specification. */
23786 }
23787 else if (origin && old_die->die_parent != context_die)
23788 {
23789 /* If we will be creating an inlined instance, we need a
23790 new DIE that will get annotated with
23791 DW_AT_abstract_origin. */
23792 gcc_assert (!DECL_ABSTRACT_P (decl));
23793 }
23794 else
23795 {
23796 /* If a DIE was dumped early, it still needs location info.
23797 Skip to where we fill the location bits. */
23798 var_die = old_die;
23799
23800 /* ??? In LTRANS we cannot annotate early created variably
23801 modified type DIEs without copying them and adjusting all
23802 references to them. Thus we dump them again. Also add a
23803 reference to them, but beware of a -g0 compile and -g link,
23804 in which case the reference will already be present. */
23805 tree type = TREE_TYPE (decl_or_origin);
23806 if (in_lto_p
23807 && ! get_AT (var_die, DW_AT_type)
23808 && variably_modified_type_p
23809 (type, decl_function_context (decl_or_origin)))
23810 {
23811 if (decl_by_reference_p (decl_or_origin))
23812 add_type_attribute (var_die, TREE_TYPE (type),
23813 TYPE_UNQUALIFIED, false, context_die);
23814 else
23815 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23816 false, context_die);
23817 }
23818
23819 goto gen_variable_die_location;
23820 }
23821 }
23822
23823 /* For static data members, the declaration in the class is supposed
23824 to have a DW_TAG_member tag in DWARF{3,4}, and for compatibility we emit
23825 it also in DWARF2; the specification should still be DW_TAG_variable
23826 referencing the DW_TAG_member DIE. */
23827 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23828 var_die = new_die (DW_TAG_member, context_die, decl);
23829 else
23830 var_die = new_die (DW_TAG_variable, context_die, decl);
23831
23832 if (origin != NULL)
23833 add_abstract_origin_attribute (var_die, origin);
23834
23835 /* Loop unrolling can create multiple blocks that refer to the same
23836 static variable, so we must test for the DW_AT_declaration flag.
23837
23838 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23839 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23840 sharing them.
23841
23842 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23843 else if (decl_will_get_specification_p (old_die, decl, declaration))
23844 {
23845 /* This is a definition of a C++ class level static. */
23846 add_AT_specification (var_die, old_die);
23847 specialization_p = true;
23848 if (DECL_NAME (decl))
23849 {
23850 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23851 struct dwarf_file_data * file_index = lookup_filename (s.file);
23852
23853 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23854 add_AT_file (var_die, DW_AT_decl_file, file_index);
23855
23856 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23857 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23858
23859 if (debug_column_info
23860 && s.column
23861 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23862 != (unsigned) s.column))
23863 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23864
23865 if (old_die->die_tag == DW_TAG_member)
23866 add_linkage_name (var_die, decl);
23867 }
23868 }
23869 else
23870 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23871
23872 if ((origin == NULL && !specialization_p)
23873 || (origin != NULL
23874 && !DECL_ABSTRACT_P (decl_or_origin)
23875 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23876 decl_function_context
23877 (decl_or_origin)))
23878 || (old_die && specialization_p
23879 && override_type_for_decl_p (decl_or_origin, old_die, context_die)))
23880 {
23881 tree type = TREE_TYPE (decl_or_origin);
23882
23883 if (decl_by_reference_p (decl_or_origin))
23884 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23885 context_die);
23886 else
23887 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23888 context_die);
23889 }
23890
23891 if (origin == NULL && !specialization_p)
23892 {
23893 if (TREE_PUBLIC (decl))
23894 add_AT_flag (var_die, DW_AT_external, 1);
23895
23896 if (DECL_ARTIFICIAL (decl))
23897 add_AT_flag (var_die, DW_AT_artificial, 1);
23898
23899 add_alignment_attribute (var_die, decl);
23900
23901 add_accessibility_attribute (var_die, decl);
23902 }
23903
23904 if (declaration)
23905 add_AT_flag (var_die, DW_AT_declaration, 1);
23906
23907 if (decl && (DECL_ABSTRACT_P (decl)
23908 || !old_die || is_declaration_die (old_die)))
23909 equate_decl_number_to_die (decl, var_die);
23910
23911 gen_variable_die_location:
23912 if (! declaration
23913 && (! DECL_ABSTRACT_P (decl_or_origin)
23914 /* Local static vars are shared between all clones/inlines,
23915 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23916 already set. */
23917 || (VAR_P (decl_or_origin)
23918 && TREE_STATIC (decl_or_origin)
23919 && DECL_RTL_SET_P (decl_or_origin))))
23920 {
23921 if (early_dwarf)
23922 add_pubname (decl_or_origin, var_die);
23923 else
23924 add_location_or_const_value_attribute (var_die, decl_or_origin,
23925 decl == NULL);
23926 }
23927 else
23928 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23929
23930 if ((dwarf_version >= 4 || !dwarf_strict)
23931 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23932 DW_AT_const_expr) == 1
23933 && !get_AT (var_die, DW_AT_const_expr)
23934 && !specialization_p)
23935 add_AT_flag (var_die, DW_AT_const_expr, 1);
23936
23937 if (!dwarf_strict)
23938 {
23939 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23940 DW_AT_inline);
23941 if (inl != -1
23942 && !get_AT (var_die, DW_AT_inline)
23943 && !specialization_p)
23944 add_AT_unsigned (var_die, DW_AT_inline, inl);
23945 }
23946 }
23947
23948 /* Generate a DIE to represent a named constant. */
23949
23950 static void
23951 gen_const_die (tree decl, dw_die_ref context_die)
23952 {
23953 dw_die_ref const_die;
23954 tree type = TREE_TYPE (decl);
23955
23956 const_die = lookup_decl_die (decl);
23957 if (const_die)
23958 return;
23959
23960 const_die = new_die (DW_TAG_constant, context_die, decl);
23961 equate_decl_number_to_die (decl, const_die);
23962 add_name_and_src_coords_attributes (const_die, decl);
23963 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23964 if (TREE_PUBLIC (decl))
23965 add_AT_flag (const_die, DW_AT_external, 1);
23966 if (DECL_ARTIFICIAL (decl))
23967 add_AT_flag (const_die, DW_AT_artificial, 1);
23968 tree_add_const_value_attribute_for_decl (const_die, decl);
23969 }
23970
23971 /* Generate a DIE to represent a label identifier. */
23972
23973 static void
23974 gen_label_die (tree decl, dw_die_ref context_die)
23975 {
23976 tree origin = decl_ultimate_origin (decl);
23977 dw_die_ref lbl_die = lookup_decl_die (decl);
23978 rtx insn;
23979 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23980
23981 if (!lbl_die)
23982 {
23983 lbl_die = new_die (DW_TAG_label, context_die, decl);
23984 equate_decl_number_to_die (decl, lbl_die);
23985
23986 if (origin != NULL)
23987 add_abstract_origin_attribute (lbl_die, origin);
23988 else
23989 add_name_and_src_coords_attributes (lbl_die, decl);
23990 }
23991
23992 if (DECL_ABSTRACT_P (decl))
23993 equate_decl_number_to_die (decl, lbl_die);
23994 else if (! early_dwarf)
23995 {
23996 insn = DECL_RTL_IF_SET (decl);
23997
23998 /* Deleted labels are programmer specified labels which have been
23999 eliminated because of various optimizations. We still emit them
24000 here so that it is possible to put breakpoints on them. */
24001 if (insn
24002 && (LABEL_P (insn)
24003 || ((NOTE_P (insn)
24004 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
24005 {
24006 /* When optimization is enabled (via -O) some parts of the compiler
24007 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns that
24008 represent source-level labels which were explicitly declared by
24009 the user. This really shouldn't be happening, so catch it if it
24010 ever does. */
24011 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
24012
24013 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
24014 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
24015 }
24016 else if (insn
24017 && NOTE_P (insn)
24018 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
24019 && CODE_LABEL_NUMBER (insn) != -1)
24020 {
24021 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
24022 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
24023 }
24024 }
24025 }
24026
24027 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
24028 attributes to the DIE for a block STMT, to describe where the inlined
24029 function was called from. This is similar to add_src_coords_attributes. */
24030
24031 static inline void
24032 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
24033 {
24034 /* We can end up with BUILTINS_LOCATION here. */
24035 if (RESERVED_LOCATION_P (BLOCK_SOURCE_LOCATION (stmt)))
24036 return;
24037
24038 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
24039
24040 if (dwarf_version >= 3 || !dwarf_strict)
24041 {
24042 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
24043 add_AT_unsigned (die, DW_AT_call_line, s.line);
24044 if (debug_column_info && s.column)
24045 add_AT_unsigned (die, DW_AT_call_column, s.column);
24046 }
24047 }
24048
24049
24050 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
24051 Add low_pc and high_pc attributes to the DIE for a block STMT. */
24052
24053 static inline void
24054 add_high_low_attributes (tree stmt, dw_die_ref die)
24055 {
24056 char label[MAX_ARTIFICIAL_LABEL_BYTES];
24057
24058 if (inline_entry_data **iedp
24059 = !inline_entry_data_table ? NULL
24060 : inline_entry_data_table->find_slot_with_hash (stmt,
24061 htab_hash_pointer (stmt),
24062 NO_INSERT))
24063 {
24064 inline_entry_data *ied = *iedp;
24065 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
24066 gcc_assert (debug_inline_points);
24067 gcc_assert (inlined_function_outer_scope_p (stmt));
24068
24069 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
24070 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24071
24072 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
24073 && !dwarf_strict)
24074 {
24075 if (!output_asm_line_debug_info ())
24076 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
24077 else
24078 {
24079 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
24080 /* FIXME: this will resolve to a small number. Could we
24081 possibly emit smaller data? Ideally we'd emit a
24082 uleb128, but that would make the size of DIEs
24083 impossible for the compiler to compute, since it's
24084 the assembler that computes the value of the view
24085 label in this case. Ideally, we'd have a single form
24086 encompassing both the address and the view, and
24087 indirecting them through a table might make things
24088 easier, but even that would be more wasteful,
24089 space-wise, than what we have now. */
24090 add_AT_symview (die, DW_AT_GNU_entry_view, label);
24091 }
24092 }
24093
24094 inline_entry_data_table->clear_slot (iedp);
24095 }
24096
24097 if (BLOCK_FRAGMENT_CHAIN (stmt)
24098 && (dwarf_version >= 3 || !dwarf_strict))
24099 {
24100 tree chain, superblock = NULL_TREE;
24101 dw_die_ref pdie;
24102 dw_attr_node *attr = NULL;
24103
24104 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
24105 {
24106 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24107 BLOCK_NUMBER (stmt));
24108 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24109 }
24110
24111 /* Optimize duplicate .debug_ranges lists or even tails of
24112 lists. If this BLOCK has same ranges as its supercontext,
24113 lookup DW_AT_ranges attribute in the supercontext (and
24114 recursively so), verify that the ranges_table contains the
24115 right values and use it instead of adding a new .debug_ranges list. */
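/* A sketch of the effect: when this BLOCK covers a suffix of the ranges
   already recorded for its supercontext, the parent DIE's .debug_ranges
   list can be reused by pointing DW_AT_ranges at the matching tail of
   that list, instead of emitting a second, identical list.  */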
24116 for (chain = stmt, pdie = die;
24117 BLOCK_SAME_RANGE (chain);
24118 chain = BLOCK_SUPERCONTEXT (chain))
24119 {
24120 dw_attr_node *new_attr;
24121
24122 pdie = pdie->die_parent;
24123 if (pdie == NULL)
24124 break;
24125 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
24126 break;
24127 new_attr = get_AT (pdie, DW_AT_ranges);
24128 if (new_attr == NULL
24129 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
24130 break;
24131 attr = new_attr;
24132 superblock = BLOCK_SUPERCONTEXT (chain);
24133 }
24134 if (attr != NULL
24135 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
24136 == (int)BLOCK_NUMBER (superblock))
24137 && BLOCK_FRAGMENT_CHAIN (superblock))
24138 {
24139 unsigned long off = attr->dw_attr_val.v.val_offset;
24140 unsigned long supercnt = 0, thiscnt = 0;
24141 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
24142 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24143 {
24144 ++supercnt;
24145 gcc_checking_assert ((*ranges_table)[off + supercnt].num
24146 == (int)BLOCK_NUMBER (chain));
24147 }
24148 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
24149 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
24150 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24151 ++thiscnt;
24152 gcc_assert (supercnt >= thiscnt);
24153 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
24154 false);
24155 note_rnglist_head (off + supercnt - thiscnt);
24156 return;
24157 }
24158
24159 unsigned int offset = add_ranges (stmt, true);
24160 add_AT_range_list (die, DW_AT_ranges, offset, false);
24161 note_rnglist_head (offset);
24162
24163 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
24164 chain = BLOCK_FRAGMENT_CHAIN (stmt);
24165 do
24166 {
24167 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
24168 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
24169 chain = BLOCK_FRAGMENT_CHAIN (chain);
24170 }
24171 while (chain);
24172 add_ranges (NULL);
24173 }
24174 else
24175 {
24176 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24177 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24178 BLOCK_NUMBER (stmt));
24179 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24180 BLOCK_NUMBER (stmt));
24181 add_AT_low_high_pc (die, label, label_high, false);
24182 }
24183 }
24184
24185 /* Generate a DIE for a lexical block. */
24186
24187 static void
24188 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24189 {
24190 dw_die_ref old_die = lookup_block_die (stmt);
24191 dw_die_ref stmt_die = NULL;
24192 if (!old_die)
24193 {
24194 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24195 equate_block_to_die (stmt, stmt_die);
24196 }
24197
24198 if (BLOCK_ABSTRACT_ORIGIN (stmt))
24199 {
24200 /* If this is an inlined or concrete instance, create a new lexical
24201 block DIE for anything below to attach DW_AT_abstract_origin to. */
24202 if (old_die)
24203 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24204
24205 tree origin = block_ultimate_origin (stmt);
24206 if (origin != NULL_TREE && (origin != stmt || old_die))
24207 add_abstract_origin_attribute (stmt_die, origin);
24208
24209 old_die = NULL;
24210 }
24211
24212 if (old_die)
24213 stmt_die = old_die;
24214
24215 /* A non-abstract block whose blocks have already been reordered
24216 should have the instruction range for this block. If so, set the
24217 high/low attributes. */
24218 if (!early_dwarf && TREE_ASM_WRITTEN (stmt))
24219 {
24220 gcc_assert (stmt_die);
24221 add_high_low_attributes (stmt, stmt_die);
24222 }
24223
24224 decls_for_scope (stmt, stmt_die);
24225 }
24226
24227 /* Generate a DIE for an inlined subprogram. */
24228
24229 static void
24230 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24231 {
24232 tree decl = block_ultimate_origin (stmt);
24233
24234 /* Make sure any inlined functions are known to be inlineable. */
24235 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24236 || cgraph_function_possibly_inlined_p (decl));
24237
24238 dw_die_ref subr_die = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24239
24240 if (call_arg_locations || debug_inline_points)
24241 equate_block_to_die (stmt, subr_die);
24242 add_abstract_origin_attribute (subr_die, decl);
24243 if (TREE_ASM_WRITTEN (stmt))
24244 add_high_low_attributes (stmt, subr_die);
24245 add_call_src_coords_attributes (stmt, subr_die);
24246
24247 /* The inliner creates an extra BLOCK for the parameter setup,
24248 we want to merge that with the actual outermost BLOCK of the
24249 inlined function to avoid duplicate locals in consumers.
24250 Do that by doing the recursion to subblocks on the single subblock
24251 of STMT. */
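/* Concretely (a sketch of the intent, not an exact BLOCK dump): STMT is
   the inliner's parameter-setup BLOCK and its single subblock is the
   copy of the inlined function's outermost BLOCK; both contribute their
   locals directly to the one DW_TAG_inlined_subroutine DIE instead of
   producing an extra nested DW_TAG_lexical_block.  */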
24252 bool unwrap_one = false;
24253 if (BLOCK_SUBBLOCKS (stmt) && !BLOCK_CHAIN (BLOCK_SUBBLOCKS (stmt)))
24254 {
24255 tree origin = block_ultimate_origin (BLOCK_SUBBLOCKS (stmt));
24256 if (origin
24257 && TREE_CODE (origin) == BLOCK
24258 && BLOCK_SUPERCONTEXT (origin) == decl)
24259 unwrap_one = true;
24260 }
24261 decls_for_scope (stmt, subr_die, !unwrap_one);
24262 if (unwrap_one)
24263 decls_for_scope (BLOCK_SUBBLOCKS (stmt), subr_die);
24264 }
24265
24266 /* Generate a DIE for a field in a record, or structure. CTX is required: see
24267 the comment for VLR_CONTEXT. */
24268
24269 static void
24270 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24271 {
24272 dw_die_ref decl_die;
24273
24274 if (TREE_TYPE (decl) == error_mark_node)
24275 return;
24276
24277 decl_die = new_die (DW_TAG_member, context_die, decl);
24278 add_name_and_src_coords_attributes (decl_die, decl);
24279 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24280 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24281 context_die);
24282
24283 if (DECL_BIT_FIELD_TYPE (decl))
24284 {
24285 add_byte_size_attribute (decl_die, decl);
24286 add_bit_size_attribute (decl_die, decl);
24287 add_bit_offset_attribute (decl_die, decl);
24288 }
24289
24290 add_alignment_attribute (decl_die, decl);
24291
24292 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24293 add_data_member_location_attribute (decl_die, decl, ctx);
24294
24295 if (DECL_ARTIFICIAL (decl))
24296 add_AT_flag (decl_die, DW_AT_artificial, 1);
24297
24298 add_accessibility_attribute (decl_die, decl);
24299
24300 /* Equate decl number to die, so that we can look up this decl later on. */
24301 equate_decl_number_to_die (decl, decl_die);
24302 }
24303
24304 /* Generate a DIE for a pointer to a member type. TYPE can be an
24305 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24306 pointer to member function. */
24307
24308 static void
24309 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24310 {
24311 if (lookup_type_die (type))
24312 return;
24313
24314 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24315 scope_die_for (type, context_die), type);
24316
24317 equate_type_number_to_die (type, ptr_die);
24318 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24319 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24320 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24321 context_die);
24322 add_alignment_attribute (ptr_die, type);
24323
24324 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24325 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24326 {
24327 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24328 add_AT_loc (ptr_die, DW_AT_use_location, op);
24329 }
24330 }
24331
24332 static char *producer_string;
24333
24334 /* Return a heap-allocated producer string, including the command-line
24335 options when -grecord-gcc-switches is in effect. */
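/* A typical result (version number and switches are illustrative only)
   looks like
     "GNU C17 10.2.0 -mtune=generic -march=x86-64 -g -O2"
   i.e. the language and version strings followed by the recorded
   command-line options.  */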
24336
24337 static char *
24338 gen_producer_string (void)
24339 {
24340 size_t j;
24341 auto_vec<const char *> switches;
24342 const char *language_string = lang_hooks.name;
24343 char *producer, *tail;
24344 const char *p;
24345 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24346 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24347
24348 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24349 switch (save_decoded_options[j].opt_index)
24350 {
24351 case OPT_o:
24352 case OPT_d:
24353 case OPT_dumpbase:
24354 case OPT_dumpbase_ext:
24355 case OPT_dumpdir:
24356 case OPT_quiet:
24357 case OPT_version:
24358 case OPT_v:
24359 case OPT_w:
24360 case OPT_L:
24361 case OPT_D:
24362 case OPT_I:
24363 case OPT_U:
24364 case OPT_SPECIAL_unknown:
24365 case OPT_SPECIAL_ignore:
24366 case OPT_SPECIAL_warn_removed:
24367 case OPT_SPECIAL_program_name:
24368 case OPT_SPECIAL_input_file:
24369 case OPT_grecord_gcc_switches:
24370 case OPT__output_pch_:
24371 case OPT_fdiagnostics_show_location_:
24372 case OPT_fdiagnostics_show_option:
24373 case OPT_fdiagnostics_show_caret:
24374 case OPT_fdiagnostics_show_labels:
24375 case OPT_fdiagnostics_show_line_numbers:
24376 case OPT_fdiagnostics_color_:
24377 case OPT_fdiagnostics_format_:
24378 case OPT_fverbose_asm:
24379 case OPT____:
24380 case OPT__sysroot_:
24381 case OPT_nostdinc:
24382 case OPT_nostdinc__:
24383 case OPT_fpreprocessed:
24384 case OPT_fltrans_output_list_:
24385 case OPT_fresolution_:
24386 case OPT_fdebug_prefix_map_:
24387 case OPT_fmacro_prefix_map_:
24388 case OPT_ffile_prefix_map_:
24389 case OPT_fcompare_debug:
24390 case OPT_fchecking:
24391 case OPT_fchecking_:
24392 /* Ignore these. */
24393 continue;
24394 case OPT_flto_:
24395 {
24396 const char *lto_canonical = "-flto";
24397 switches.safe_push (lto_canonical);
24398 len += strlen (lto_canonical) + 1;
24399 break;
24400 }
24401 default:
24402 if (cl_options[save_decoded_options[j].opt_index].flags
24403 & CL_NO_DWARF_RECORD)
24404 continue;
24405 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24406 == '-');
24407 switch (save_decoded_options[j].canonical_option[0][1])
24408 {
24409 case 'M':
24410 case 'i':
24411 case 'W':
24412 continue;
24413 case 'f':
24414 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24415 "dump", 4) == 0)
24416 continue;
24417 break;
24418 default:
24419 break;
24420 }
24421 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24422 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24423 break;
24424 }
24425
24426 producer = XNEWVEC (char, plen + 1 + len + 1);
24427 tail = producer;
24428 sprintf (tail, "%s %s", language_string, version_string);
24429 tail += plen;
24430
24431 FOR_EACH_VEC_ELT (switches, j, p)
24432 {
24433 len = strlen (p);
24434 *tail = ' ';
24435 memcpy (tail + 1, p, len);
24436 tail += len + 1;
24437 }
24438
24439 *tail = '\0';
24440 return producer;
24441 }
24442
24443 /* Given a C and/or C++ language/version string return the "highest".
24444 C++ is assumed to be "higher" than C in this case. Used for merging
24445 LTO translation unit languages. */
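/* For example, merging a "GNU C11" unit with a "GNU C++14" unit yields
   "GNU C++14", and merging "GNU C99" with "GNU C11" yields "GNU C11".  */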
24446 static const char *
24447 highest_c_language (const char *lang1, const char *lang2)
24448 {
24449 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24450 return "GNU C++17";
24451 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24452 return "GNU C++14";
24453 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24454 return "GNU C++11";
24455 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24456 return "GNU C++98";
24457
24458 if (strcmp ("GNU C2X", lang1) == 0 || strcmp ("GNU C2X", lang2) == 0)
24459 return "GNU C2X";
24460 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24461 return "GNU C17";
24462 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24463 return "GNU C11";
24464 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24465 return "GNU C99";
24466 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24467 return "GNU C89";
24468
24469 gcc_unreachable ();
24470 }
24471
24472
24473 /* Generate the DIE for the compilation unit. */
24474
24475 static dw_die_ref
24476 gen_compile_unit_die (const char *filename)
24477 {
24478 dw_die_ref die;
24479 const char *language_string = lang_hooks.name;
24480 int language;
24481
24482 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24483
24484 if (filename)
24485 {
24486 add_name_attribute (die, filename);
24487 /* Don't add cwd for <built-in>. */
24488 if (filename[0] != '<')
24489 add_comp_dir_attribute (die);
24490 }
24491
24492 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24493
24494 /* If our producer is LTO, try to figure out a common language to use
24495 from the global list of translation units. */
24496 if (strcmp (language_string, "GNU GIMPLE") == 0)
24497 {
24498 unsigned i;
24499 tree t;
24500 const char *common_lang = NULL;
24501
24502 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24503 {
24504 if (!TRANSLATION_UNIT_LANGUAGE (t))
24505 continue;
24506 if (!common_lang)
24507 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24508 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24509 ;
24510 else if (strncmp (common_lang, "GNU C", 5) == 0
24511 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24512 /* Mixing C and C++ is ok, use C++ in that case. */
24513 common_lang = highest_c_language (common_lang,
24514 TRANSLATION_UNIT_LANGUAGE (t));
24515 else
24516 {
24517 /* Fall back to C. */
24518 common_lang = NULL;
24519 break;
24520 }
24521 }
24522
24523 if (common_lang)
24524 language_string = common_lang;
24525 }
24526
24527 language = DW_LANG_C;
24528 if (strncmp (language_string, "GNU C", 5) == 0
24529 && ISDIGIT (language_string[5]))
24530 {
24531 language = DW_LANG_C89;
24532 if (dwarf_version >= 3 || !dwarf_strict)
24533 {
24534 if (strcmp (language_string, "GNU C89") != 0)
24535 language = DW_LANG_C99;
24536
24537 if (dwarf_version >= 5 /* || !dwarf_strict */)
24538 if (strcmp (language_string, "GNU C11") == 0
24539 || strcmp (language_string, "GNU C17") == 0
24540 || strcmp (language_string, "GNU C2X") == 0)
24541 language = DW_LANG_C11;
24542 }
24543 }
24544 else if (strncmp (language_string, "GNU C++", 7) == 0)
24545 {
24546 language = DW_LANG_C_plus_plus;
24547 if (dwarf_version >= 5 /* || !dwarf_strict */)
24548 {
24549 if (strcmp (language_string, "GNU C++11") == 0)
24550 language = DW_LANG_C_plus_plus_11;
24551 else if (strcmp (language_string, "GNU C++14") == 0)
24552 language = DW_LANG_C_plus_plus_14;
24553 else if (strcmp (language_string, "GNU C++17") == 0)
24554 /* For now. */
24555 language = DW_LANG_C_plus_plus_14;
24556 }
24557 }
24558 else if (strcmp (language_string, "GNU F77") == 0)
24559 language = DW_LANG_Fortran77;
24560 else if (dwarf_version >= 3 || !dwarf_strict)
24561 {
24562 if (strcmp (language_string, "GNU Ada") == 0)
24563 language = DW_LANG_Ada95;
24564 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24565 {
24566 language = DW_LANG_Fortran95;
24567 if (dwarf_version >= 5 /* || !dwarf_strict */)
24568 {
24569 if (strcmp (language_string, "GNU Fortran2003") == 0)
24570 language = DW_LANG_Fortran03;
24571 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24572 language = DW_LANG_Fortran08;
24573 }
24574 }
24575 else if (strcmp (language_string, "GNU Objective-C") == 0)
24576 language = DW_LANG_ObjC;
24577 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24578 language = DW_LANG_ObjC_plus_plus;
24579 else if (strcmp (language_string, "GNU D") == 0)
24580 language = DW_LANG_D;
24581 else if (dwarf_version >= 5 || !dwarf_strict)
24582 {
24583 if (strcmp (language_string, "GNU Go") == 0)
24584 language = DW_LANG_Go;
24585 }
24586 }
24587 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24588 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24589 language = DW_LANG_Fortran90;
24590 /* Likewise for Ada. */
24591 else if (strcmp (language_string, "GNU Ada") == 0)
24592 language = DW_LANG_Ada83;
24593
24594 add_AT_unsigned (die, DW_AT_language, language);
24595
24596 switch (language)
24597 {
24598 case DW_LANG_Fortran77:
24599 case DW_LANG_Fortran90:
24600 case DW_LANG_Fortran95:
24601 case DW_LANG_Fortran03:
24602 case DW_LANG_Fortran08:
24603 /* Fortran has case-insensitive identifiers and the front-end
24604 lowercases everything. */
24605 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24606 break;
24607 default:
24608 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24609 break;
24610 }
24611 return die;
24612 }
24613
24614 /* Generate the DIE for a base class. */
24615
24616 static void
24617 gen_inheritance_die (tree binfo, tree access, tree type,
24618 dw_die_ref context_die)
24619 {
24620 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24621 struct vlr_context ctx = { type, NULL };
24622
24623 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24624 context_die);
24625 add_data_member_location_attribute (die, binfo, &ctx);
24626
24627 if (BINFO_VIRTUAL_P (binfo))
24628 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24629
24630 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24631 children, otherwise the default is DW_ACCESS_public. In DWARF2
24632 the default has always been DW_ACCESS_private. */
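/* As an illustration of these rules: for "class D : public B" an explicit
   DW_ACCESS_public must still be emitted in DWARF 3+ (the default inside
   DW_TAG_class_type is private), whereas for "struct D : public B" the
   DWARF 3+ default of public applies and no attribute is needed.  */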
24633 if (access == access_public_node)
24634 {
24635 if (dwarf_version == 2
24636 || context_die->die_tag == DW_TAG_class_type)
24637 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24638 }
24639 else if (access == access_protected_node)
24640 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24641 else if (dwarf_version > 2
24642 && context_die->die_tag != DW_TAG_class_type)
24643 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24644 }
24645
24646 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24647 structure. */
24648
24649 static bool
24650 is_variant_part (tree decl)
24651 {
24652 return (TREE_CODE (decl) == FIELD_DECL
24653 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24654 }
24655
24656 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24657 return the FIELD_DECL. Return NULL_TREE otherwise. */
24658
24659 static tree
24660 analyze_discr_in_predicate (tree operand, tree struct_type)
24661 {
24662 while (CONVERT_EXPR_P (operand))
24663 operand = TREE_OPERAND (operand, 0);
24664
24665 /* Match field access to members of struct_type only. */
24666 if (TREE_CODE (operand) == COMPONENT_REF
24667 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24668 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24669 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24670 return TREE_OPERAND (operand, 1);
24671 else
24672 return NULL_TREE;
24673 }
24674
24675 /* Check that SRC is a constant integer that can be represented as a native
24676 integer constant (either signed or unsigned). If so, store it into DEST and
24677 return true. Return false otherwise. */
24678
24679 static bool
24680 get_discr_value (tree src, dw_discr_value *dest)
24681 {
24682 tree discr_type = TREE_TYPE (src);
24683
24684 if (lang_hooks.types.get_debug_type)
24685 {
24686 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24687 if (debug_type != NULL)
24688 discr_type = debug_type;
24689 }
24690
24691 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24692 return false;
24693
24694 /* Signedness can vary between the original type and the debug type. This
24695 can happen for character types in Ada for instance: the character type
24696 used for code generation can be signed, to be compatible with the C one,
24697 but from a debugger point of view, it must be unsigned. */
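/* For example (hypothetical values): an Ada Character discriminant whose
   code generation type is a signed 8-bit integer stores the value 200 as
   -56; the debug type is unsigned, so the value handed to the debugger
   must be 200, not -56.  */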
24698 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24699 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24700
24701 if (is_orig_unsigned != is_debug_unsigned)
24702 src = fold_convert (discr_type, src);
24703
24704 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24705 return false;
24706
24707 dest->pos = is_debug_unsigned;
24708 if (is_debug_unsigned)
24709 dest->v.uval = tree_to_uhwi (src);
24710 else
24711 dest->v.sval = tree_to_shwi (src);
24712
24713 return true;
24714 }
24715
24716 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24717 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24718 store NULL_TREE in DISCR_DECL. Otherwise:
24719
24720 - store the discriminant field in STRUCT_TYPE that controls the variant
24721 part to *DISCR_DECL
24722
24723 - put in *DISCR_LISTS_P an array where for each variant, the item
24724 represents the corresponding matching list of discriminant values.
24725
24726 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24727 the above array.
24728
24729 Note that when the array is allocated (i.e. when the analysis is
24730 successful), it is up to the caller to free the array. */
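/* As an illustration (an Ada-like variant record, names are hypothetical):

     type Rec (Kind : Integer) is record
       case Kind is
         when 1 | 3 .. 5 => ...;   --  variant 0
         when others     => ...;   --  variant 1 (default)
       end case;
     end record;

   would yield *DISCR_DECL = the field for Kind, *DISCR_LISTS_LENGTH = 2,
   (*DISCR_LISTS_P)[0] describing the set { 1, 3 .. 5 } and
   (*DISCR_LISTS_P)[1] = NULL, since the default variant carries no
   matching list.  */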
24731
24732 static void
24733 analyze_variants_discr (tree variant_part_decl,
24734 tree struct_type,
24735 tree *discr_decl,
24736 dw_discr_list_ref **discr_lists_p,
24737 unsigned *discr_lists_length)
24738 {
24739 tree variant_part_type = TREE_TYPE (variant_part_decl);
24740 tree variant;
24741 dw_discr_list_ref *discr_lists;
24742 unsigned i;
24743
24744 /* Compute how many variants there are in this variant part. */
24745 *discr_lists_length = 0;
24746 for (variant = TYPE_FIELDS (variant_part_type);
24747 variant != NULL_TREE;
24748 variant = DECL_CHAIN (variant))
24749 ++*discr_lists_length;
24750
24751 *discr_decl = NULL_TREE;
24752 *discr_lists_p
24753 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24754 sizeof (**discr_lists_p));
24755 discr_lists = *discr_lists_p;
24756
24757 /* And then analyze all variants to extract discriminant information for all
24758 of them. This analysis is conservative: as soon as we detect something we
24759 do not support, abort everything and pretend we found nothing. */
24760 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24761 variant != NULL_TREE;
24762 variant = DECL_CHAIN (variant), ++i)
24763 {
24764 tree match_expr = DECL_QUALIFIER (variant);
24765
24766 /* Now, try to analyze the predicate and deduce a discriminant for
24767 it. */
24768 if (match_expr == boolean_true_node)
24769 /* Typically happens for the default variant: it matches all cases that
24770 previous variants rejected. Don't output any matching value for
24771 this one. */
24772 continue;
24773
24774 /* The following loop tries to iterate over each discriminant
24775 possibility: single values or ranges. */
24776 while (match_expr != NULL_TREE)
24777 {
24778 tree next_round_match_expr;
24779 tree candidate_discr = NULL_TREE;
24780 dw_discr_list_ref new_node = NULL;
24781
24782 /* Possibilities are matched one after the other by nested
24783 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24784 continue with the rest at next iteration. */
24785 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24786 {
24787 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24788 match_expr = TREE_OPERAND (match_expr, 1);
24789 }
24790 else
24791 next_round_match_expr = NULL_TREE;
24792
24793 if (match_expr == boolean_false_node)
24794 /* This sub-expression matches nothing: just wait for the next
24795 one. */
24796 ;
24797
24798 else if (TREE_CODE (match_expr) == EQ_EXPR)
24799 {
24800 /* We are matching: <discr_field> == <integer_cst>
24801 This sub-expression matches a single value. */
24802 tree integer_cst = TREE_OPERAND (match_expr, 1);
24803
24804 candidate_discr
24805 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24806 struct_type);
24807
24808 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24809 if (!get_discr_value (integer_cst,
24810 &new_node->dw_discr_lower_bound))
24811 goto abort;
24812 new_node->dw_discr_range = false;
24813 }
24814
24815 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24816 {
24817 /* We are matching:
24818 <discr_field> > <integer_cst>
24819 && <discr_field> < <integer_cst>.
24820 This sub-expression matches the range of values between the
24821 two matched integer constants. Note that comparisons can be
24822 inclusive or exclusive. */
24823 tree candidate_discr_1, candidate_discr_2;
24824 tree lower_cst, upper_cst;
24825 bool lower_cst_included, upper_cst_included;
24826 tree lower_op = TREE_OPERAND (match_expr, 0);
24827 tree upper_op = TREE_OPERAND (match_expr, 1);
24828
24829 /* When the comparison is exclusive, the integer constant is not
24830 the discriminant range bound we are looking for: we will have
24831 to increment or decrement it. */
24832 if (TREE_CODE (lower_op) == GE_EXPR)
24833 lower_cst_included = true;
24834 else if (TREE_CODE (lower_op) == GT_EXPR)
24835 lower_cst_included = false;
24836 else
24837 goto abort;
24838
24839 if (TREE_CODE (upper_op) == LE_EXPR)
24840 upper_cst_included = true;
24841 else if (TREE_CODE (upper_op) == LT_EXPR)
24842 upper_cst_included = false;
24843 else
24844 goto abort;
24845
24846 /* Extract the discriminant from the first operand and check it
24847 is consistent with the same analysis in the second
24848 operand. */
24849 candidate_discr_1
24850 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24851 struct_type);
24852 candidate_discr_2
24853 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24854 struct_type);
24855 if (candidate_discr_1 == candidate_discr_2)
24856 candidate_discr = candidate_discr_1;
24857 else
24858 goto abort;
24859
24860 /* Extract bounds from both. */
24861 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24862 lower_cst = TREE_OPERAND (lower_op, 1);
24863 upper_cst = TREE_OPERAND (upper_op, 1);
24864
24865 if (!lower_cst_included)
24866 lower_cst
24867 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24868 build_int_cst (TREE_TYPE (lower_cst), 1));
24869 if (!upper_cst_included)
24870 upper_cst
24871 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24872 build_int_cst (TREE_TYPE (upper_cst), 1));
24873
24874 if (!get_discr_value (lower_cst,
24875 &new_node->dw_discr_lower_bound)
24876 || !get_discr_value (upper_cst,
24877 &new_node->dw_discr_upper_bound))
24878 goto abort;
24879
24880 new_node->dw_discr_range = true;
24881 }
24882
24883 else if ((candidate_discr
24884 = analyze_discr_in_predicate (match_expr, struct_type))
24885 && (TREE_TYPE (candidate_discr) == boolean_type_node
24886 || TREE_TYPE (TREE_TYPE (candidate_discr))
24887 == boolean_type_node))
24888 {
24889 /* We are matching: <discr_field> for a boolean discriminant.
24890 This sub-expression matches boolean_true_node. */
24891 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24892 if (!get_discr_value (boolean_true_node,
24893 &new_node->dw_discr_lower_bound))
24894 goto abort;
24895 new_node->dw_discr_range = false;
24896 }
24897
24898 else
24899 /* Unsupported sub-expression: we cannot determine the set of
24900 matching discriminant values. Abort everything. */
24901 goto abort;
24902
24903 /* If the discriminant info is not consistent with what we saw so
24904 far, consider the analysis failed and abort everything. */
24905 if (candidate_discr == NULL_TREE
24906 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24907 goto abort;
24908 else
24909 *discr_decl = candidate_discr;
24910
24911 if (new_node != NULL)
24912 {
24913 new_node->dw_discr_next = discr_lists[i];
24914 discr_lists[i] = new_node;
24915 }
24916 match_expr = next_round_match_expr;
24917 }
24918 }
24919
24920 /* If we reach this point, we could match everything we were interested
24921 in. */
24922 return;
24923
24924 abort:
24925 /* Clean up all data structures and return no result. */
24926 free (*discr_lists_p);
24927 *discr_lists_p = NULL;
24928 *discr_decl = NULL_TREE;
24929 }
24930
24931 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24932 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24933 under CONTEXT_DIE.
24934
24935 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24936 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24937 this type, which are record types, represent the available variants and each
24938 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24939 values are inferred from these attributes.
24940
24941 In trees, the offsets for the fields inside these sub-records are relative
24942 to the variant part itself, whereas the corresponding DIEs should have
24943 offset attributes that are relative to the embedding record base address.
24944 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24945 must be an expression that computes the offset of the variant part to
24946 describe in DWARF. */
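/* Schematically, the DWARF produced here has the shape (a sketch only):

     DW_TAG_variant_part
       DW_AT_discr              -> DIE of the discriminant member
       DW_TAG_variant
         DW_AT_discr_value      (or DW_AT_discr_list for several values)
         DW_TAG_member ...      <- the fields of that variant
       DW_TAG_variant           <- default variant, no discr attribute
         DW_TAG_member ...  */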
24947
24948 static void
24949 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24950 dw_die_ref context_die)
24951 {
24952 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24953 tree variant_part_offset = vlr_ctx->variant_part_offset;
24954 struct loc_descr_context ctx = {
24955 vlr_ctx->struct_type, /* context_type */
24956 NULL_TREE, /* base_decl */
24957 NULL, /* dpi */
24958 false, /* placeholder_arg */
24959 false /* placeholder_seen */
24960 };
24961
24962 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24963 NULL_TREE if there is no such field. */
24964 tree discr_decl = NULL_TREE;
24965 dw_discr_list_ref *discr_lists;
24966 unsigned discr_lists_length = 0;
24967 unsigned i;
24968
24969 dw_die_ref dwarf_proc_die = NULL;
24970 dw_die_ref variant_part_die
24971 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24972
24973 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24974
24975 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24976 &discr_decl, &discr_lists, &discr_lists_length);
24977
24978 if (discr_decl != NULL_TREE)
24979 {
24980 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24981
24982 if (discr_die)
24983 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24984 else
24985 /* We have no DIE for the discriminant, so just discard all
24986 discriminant information in the output. */
24987 discr_decl = NULL_TREE;
24988 }
24989
24990 /* If the offset for this variant part is more complex than a constant,
24991 create a DWARF procedure for it so that we will not have to generate DWARF
24992 expressions for it for each member. */
24993 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24994 && (dwarf_version >= 3 || !dwarf_strict))
24995 {
24996 const tree dwarf_proc_fndecl
24997 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24998 build_function_type (TREE_TYPE (variant_part_offset),
24999 NULL_TREE));
25000 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
25001 const dw_loc_descr_ref dwarf_proc_body
25002 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
25003
25004 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
25005 dwarf_proc_fndecl, context_die);
25006 if (dwarf_proc_die != NULL)
25007 variant_part_offset = dwarf_proc_call;
25008 }
25009
25010 /* Output DIEs for all variants. */
25011 i = 0;
25012 for (tree variant = TYPE_FIELDS (variant_part_type);
25013 variant != NULL_TREE;
25014 variant = DECL_CHAIN (variant), ++i)
25015 {
25016 tree variant_type = TREE_TYPE (variant);
25017 dw_die_ref variant_die;
25018
25019 /* All variants (i.e. members of a variant part) are supposed to be
25020 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
25021 under these records. */
25022 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
25023
25024 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
25025 equate_decl_number_to_die (variant, variant_die);
25026
25027 /* Output discriminant values this variant matches, if any. */
25028 if (discr_decl == NULL || discr_lists[i] == NULL)
25029 /* In case we have no discriminant information at all, this is
25030 probably the default variant: as the standard says, don't
25031 output any discriminant value/list attribute. */
25032 ;
25033 else if (discr_lists[i]->dw_discr_next == NULL
25034 && !discr_lists[i]->dw_discr_range)
25035 /* If there is only one accepted value, don't bother outputting a
25036 list. */
25037 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
25038 else
25039 add_discr_list (variant_die, discr_lists[i]);
25040
25041 for (tree member = TYPE_FIELDS (variant_type);
25042 member != NULL_TREE;
25043 member = DECL_CHAIN (member))
25044 {
25045 struct vlr_context vlr_sub_ctx = {
25046 vlr_ctx->struct_type, /* struct_type */
25047 NULL /* variant_part_offset */
25048 };
25049 if (is_variant_part (member))
25050 {
25051 /* All offsets for fields inside variant parts are relative to
25052 the top-level embedding RECORD_TYPE's base address. On the
25053 other hand, offsets in GCC's types are relative to the
25054 innermost variant part. So we have to sum offsets each time
25055 we recurse. */
25056
25057 vlr_sub_ctx.variant_part_offset
25058 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
25059 variant_part_offset, byte_position (member));
25060 gen_variant_part (member, &vlr_sub_ctx, variant_die);
25061 }
25062 else
25063 {
25064 vlr_sub_ctx.variant_part_offset = variant_part_offset;
25065 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
25066 }
25067 }
25068 }
25069
25070 free (discr_lists);
25071 }
25072
25073 /* Generate a DIE for a class member. */
25074
25075 static void
25076 gen_member_die (tree type, dw_die_ref context_die)
25077 {
25078 tree member;
25079 tree binfo = TYPE_BINFO (type);
25080
25081 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
25082
25083 /* If this is not an incomplete type, output descriptions of each of its
25084 members. Note that as we output the DIEs necessary to represent the
25085 members of this record or union type, we will also be trying to output
25086 DIEs to represent the *types* of those members. However the `type'
25087 function (above) will specifically avoid generating type DIEs for member
25088 types *within* the list of member DIEs for this (containing) type except
25089 for those types (of members) which are explicitly marked as also being
25090 members of this (containing) type themselves. The g++ front end can
25091 force any given type to be treated as a member of some other (containing)
25092 type by setting the TYPE_CONTEXT of the given (member) type to point to
25093 the TREE node representing the appropriate (containing) type. */
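/* For example, in C++, given "struct Outer { struct Inner { int i; };
   Inner m; };", Inner has Outer as its TYPE_CONTEXT, so the DIE for
   Inner is emitted as a child of Outer's DIE while Outer's members are
   walked here.  */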
25094
25095 /* First output info about the base classes. */
25096 if (binfo && early_dwarf)
25097 {
25098 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
25099 int i;
25100 tree base;
25101
25102 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
25103 gen_inheritance_die (base,
25104 (accesses ? (*accesses)[i] : access_public_node),
25105 type,
25106 context_die);
25107 }
25108
25109 /* Now output info about the members. */
25110 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
25111 {
25112 /* Ignore clones. */
25113 if (DECL_ABSTRACT_ORIGIN (member))
25114 continue;
25115
25116 struct vlr_context vlr_ctx = { type, NULL_TREE };
25117 bool static_inline_p
25118 = (VAR_P (member)
25119 && TREE_STATIC (member)
25120 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
25121 != -1));
25122
25123 /* If we thought we were generating minimal debug info for TYPE
25124 and then changed our minds, some of the member declarations
25125 may have already been defined. Don't define them again, but
25126 do put them in the right order. */
25127
25128 if (dw_die_ref child = lookup_decl_die (member))
25129 {
25130 /* Handle inline static data members, which only have in-class
25131 declarations. */
25132 bool splice = true;
25133
25134 dw_die_ref ref = NULL;
25135 if (child->die_tag == DW_TAG_variable
25136 && child->die_parent == comp_unit_die ())
25137 {
25138 ref = get_AT_ref (child, DW_AT_specification);
25139
25140 /* For C++17 inline static data members followed by a redundant
25141 out-of-class redeclaration, we might get here with
25142 child being the DIE created for the out-of-class
25143 redeclaration and with its DW_AT_specification being
25144 the DIE created for in-class definition. We want to
25145 reparent the latter, and don't want to create another
25146 DIE with DW_AT_specification in that case, because
25147 we already have one. */
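/* A source-level example of that situation (hypothetical names):

     struct S { static inline int i = 0; };
     int S::i;   // redundant out-of-class redeclaration

   Here child may be the DIE for the redeclaration and ref the DIE for
   the in-class definition, and it is the latter that gets reparented
   below.  */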
25148 if (ref
25149 && static_inline_p
25150 && ref->die_tag == DW_TAG_variable
25151 && ref->die_parent == comp_unit_die ()
25152 && get_AT (ref, DW_AT_specification) == NULL)
25153 {
25154 child = ref;
25155 ref = NULL;
25156 static_inline_p = false;
25157 }
25158
25159 if (!ref)
25160 {
25161 reparent_child (child, context_die);
25162 if (dwarf_version < 5)
25163 child->die_tag = DW_TAG_member;
25164 splice = false;
25165 }
25166 }
25167
25168 if (splice)
25169 splice_child_die (context_die, child);
25170 }
25171
25172 /* Do not generate standard DWARF for variant parts if we are generating
25173 the corresponding GNAT encodings: DIEs generated for both would
25174 conflict in our mappings. */
25175 else if (is_variant_part (member)
25176 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
25177 {
25178 vlr_ctx.variant_part_offset = byte_position (member);
25179 gen_variant_part (member, &vlr_ctx, context_die);
25180 }
25181 else
25182 {
25183 vlr_ctx.variant_part_offset = NULL_TREE;
25184 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25185 }
25186
25187 /* For C++ inline static data members emit immediately a DW_TAG_variable
25188 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25189 DW_AT_specification. */
25190 if (static_inline_p)
25191 {
25192 int old_extern = DECL_EXTERNAL (member);
25193 DECL_EXTERNAL (member) = 0;
25194 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25195 DECL_EXTERNAL (member) = old_extern;
25196 }
25197 }
25198 }
25199
25200 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25201 is set, we pretend that the type was never defined, so we only get the
25202 member DIEs needed by later specification DIEs. */
25203
25204 static void
25205 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25206 enum debug_info_usage usage)
25207 {
25208 if (TREE_ASM_WRITTEN (type))
25209 {
25210 /* Fill in the bound of variable-length fields in late dwarf if
25211 still incomplete. */
25212 if (!early_dwarf && variably_modified_type_p (type, NULL))
25213 for (tree member = TYPE_FIELDS (type);
25214 member;
25215 member = DECL_CHAIN (member))
25216 fill_variable_array_bounds (TREE_TYPE (member));
25217 return;
25218 }
25219
25220 dw_die_ref type_die = lookup_type_die (type);
25221 dw_die_ref scope_die = 0;
25222 int nested = 0;
25223 int complete = (TYPE_SIZE (type)
25224 && (! TYPE_STUB_DECL (type)
25225 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25226 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25227 complete = complete && should_emit_struct_debug (type, usage);
25228
25229 if (type_die && ! complete)
25230 return;
25231
25232 if (TYPE_CONTEXT (type) != NULL_TREE
25233 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25234 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25235 nested = 1;
25236
25237 scope_die = scope_die_for (type, context_die);
25238
25239 /* Generate child DIEs for template parameters. */
25240 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25241 schedule_generic_params_dies_gen (type);
25242
25243 if (! type_die || (nested && is_cu_die (scope_die)))
25244 /* First occurrence of type or toplevel definition of nested class. */
25245 {
25246 dw_die_ref old_die = type_die;
25247
25248 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25249 ? record_type_tag (type) : DW_TAG_union_type,
25250 scope_die, type);
25251 equate_type_number_to_die (type, type_die);
25252 if (old_die)
25253 add_AT_specification (type_die, old_die);
25254 else
25255 add_name_attribute (type_die, type_tag (type));
25256 }
25257 else
25258 remove_AT (type_die, DW_AT_declaration);
25259
25260 /* If this type has been completed, then give it a byte_size attribute and
25261 then give a list of members. */
25262 if (complete && !ns_decl)
25263 {
25264 /* Prevent infinite recursion in cases where the type of some member of
25265 this type is expressed in terms of this type itself. */
25266 TREE_ASM_WRITTEN (type) = 1;
25267 add_byte_size_attribute (type_die, type);
25268 add_alignment_attribute (type_die, type);
25269 if (TYPE_STUB_DECL (type) != NULL_TREE)
25270 {
25271 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25272 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25273 }
25274
25275 /* If the first reference to this type was as the return type of an
25276 inline function, then it may not have a parent. Fix this now. */
25277 if (type_die->die_parent == NULL)
25278 add_child_die (scope_die, type_die);
25279
25280 gen_member_die (type, type_die);
25281
25282 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25283 if (TYPE_ARTIFICIAL (type))
25284 add_AT_flag (type_die, DW_AT_artificial, 1);
25285
25286 /* GNU extension: Record what type our vtable lives in. */
25287 if (TYPE_VFIELD (type))
25288 {
25289 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25290
25291 gen_type_die (vtype, context_die);
25292 add_AT_die_ref (type_die, DW_AT_containing_type,
25293 lookup_type_die (vtype));
25294 }
25295 }
25296 else
25297 {
25298 add_AT_flag (type_die, DW_AT_declaration, 1);
25299
25300 /* We don't need to do this for function-local types. */
25301 if (TYPE_STUB_DECL (type)
25302 && ! decl_function_context (TYPE_STUB_DECL (type)))
25303 vec_safe_push (incomplete_types, type);
25304 }
25305
25306 if (get_AT (type_die, DW_AT_name))
25307 add_pubtype (type, type_die);
25308 }
25309
25310 /* Generate a DIE for a subroutine _type_. */
25311
25312 static void
25313 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25314 {
25315 tree return_type = TREE_TYPE (type);
25316 dw_die_ref subr_die
25317 = new_die (DW_TAG_subroutine_type,
25318 scope_die_for (type, context_die), type);
25319
25320 equate_type_number_to_die (type, subr_die);
25321 add_prototyped_attribute (subr_die, type);
25322 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25323 context_die);
25324 add_alignment_attribute (subr_die, type);
25325 gen_formal_types_die (type, subr_die);
25326
25327 if (get_AT (subr_die, DW_AT_name))
25328 add_pubtype (type, subr_die);
25329 if ((dwarf_version >= 5 || !dwarf_strict)
25330 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25331 add_AT_flag (subr_die, DW_AT_reference, 1);
25332 if ((dwarf_version >= 5 || !dwarf_strict)
25333 && lang_hooks.types.type_dwarf_attribute (type,
25334 DW_AT_rvalue_reference) != -1)
25335 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25336 }
25337
25338 /* Generate a DIE for a type definition. */
25339
25340 static void
25341 gen_typedef_die (tree decl, dw_die_ref context_die)
25342 {
25343 dw_die_ref type_die;
25344 tree type;
25345
25346 if (TREE_ASM_WRITTEN (decl))
25347 {
25348 if (DECL_ORIGINAL_TYPE (decl))
25349 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25350 return;
25351 }
25352
25353 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25354 checks in process_scope_var and modified_type_die), this should be called
25355 only for original types. */
25356 gcc_assert (decl_ultimate_origin (decl) == NULL
25357 || decl_ultimate_origin (decl) == decl);
25358
25359 TREE_ASM_WRITTEN (decl) = 1;
25360 type_die = new_die (DW_TAG_typedef, context_die, decl);
25361
25362 add_name_and_src_coords_attributes (type_die, decl);
25363 if (DECL_ORIGINAL_TYPE (decl))
25364 {
25365 type = DECL_ORIGINAL_TYPE (decl);
25366 if (type == error_mark_node)
25367 return;
25368
25369 gcc_assert (type != TREE_TYPE (decl));
25370 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25371 }
25372 else
25373 {
25374 type = TREE_TYPE (decl);
25375 if (type == error_mark_node)
25376 return;
25377
25378 if (is_naming_typedef_decl (TYPE_NAME (type)))
25379 {
25380 /* Here, we are in the case of decl being a typedef naming
25381 an anonymous type, e.g:
25382 typedef struct {...} foo;
25383 In that case TREE_TYPE (decl) is not a typedef variant
25384 type and TYPE_NAME of the anonymous type is set to the
25385 TYPE_DECL of the typedef. This construct is emitted by
25386 the C++ FE.
25387
25388 TYPE is the anonymous struct named by the typedef
25389 DECL. As we need the DW_AT_type attribute of the
25390 DW_TAG_typedef to point to the DIE of TYPE, let's
25391 generate that DIE right away. add_type_attribute
25392 called below will then pick (via lookup_type_die) that
25393 anonymous struct DIE. */
25394 if (!TREE_ASM_WRITTEN (type))
25395 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25396
25397 /* This is a GNU Extension. We are adding a
25398 DW_AT_linkage_name attribute to the DIE of the
25399 anonymous struct TYPE. The value of that attribute
25400 is the name of the typedef decl naming the anonymous
25401 struct. This greatly eases the work of consumers of
25402 this debug info. */
25403 add_linkage_name_raw (lookup_type_die (type), decl);
25404 }
25405 }
25406
25407 add_type_attribute (type_die, type, decl_quals (decl), false,
25408 context_die);
25409
25410 if (is_naming_typedef_decl (decl))
25411 /* We want that all subsequent calls to lookup_type_die with
25412 TYPE in argument yield the DW_TAG_typedef we have just
25413 created. */
25414 equate_type_number_to_die (type, type_die);
25415
25416 add_alignment_attribute (type_die, TREE_TYPE (decl));
25417
25418 add_accessibility_attribute (type_die, decl);
25419
25420 if (DECL_ABSTRACT_P (decl))
25421 equate_decl_number_to_die (decl, type_die);
25422
25423 if (get_AT (type_die, DW_AT_name))
25424 add_pubtype (decl, type_die);
25425 }
25426
25427 /* Generate a DIE for a struct, class, enum or union type. */
25428
25429 static void
25430 gen_tagged_type_die (tree type,
25431 dw_die_ref context_die,
25432 enum debug_info_usage usage)
25433 {
25434 if (type == NULL_TREE
25435 || !is_tagged_type (type))
25436 return;
25437
25438 if (TREE_ASM_WRITTEN (type))
25439 ;
25440 /* If this is a nested type whose containing class hasn't been written
25441 out yet, writing it out will cover this one, too. This does not apply
25442 to instantiations of member class templates; they need to be added to
25443 the containing class as they are generated. FIXME: This hurts the
25444 idea of combining type decls from multiple TUs, since we can't predict
25445 what set of template instantiations we'll get. */
25446 else if (TYPE_CONTEXT (type)
25447 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25448 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25449 {
25450 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25451
25452 if (TREE_ASM_WRITTEN (type))
25453 return;
25454
25455 /* If that failed, attach ourselves to the stub. */
25456 context_die = lookup_type_die (TYPE_CONTEXT (type));
25457 }
25458 else if (TYPE_CONTEXT (type) != NULL_TREE
25459 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25460 {
25461 /* If this type is local to a function that hasn't been written
25462 out yet, use a NULL context for now; it will be fixed up in
25463 decls_for_scope. */
25464 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25465 /* A declaration DIE doesn't count; nested types need to go in the
25466 specification. */
25467 if (context_die && is_declaration_die (context_die))
25468 context_die = NULL;
25469 }
25470 else
25471 context_die = declare_in_namespace (type, context_die);
25472
25473 if (TREE_CODE (type) == ENUMERAL_TYPE)
25474 {
25475 /* This might have been written out by the call to
25476 declare_in_namespace. */
25477 if (!TREE_ASM_WRITTEN (type))
25478 gen_enumeration_type_die (type, context_die);
25479 }
25480 else
25481 gen_struct_or_union_type_die (type, context_die, usage);
25482
25483 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25484 it up if it is ever completed. gen_*_type_die will set it for us
25485 when appropriate. */
25486 }
25487
25488 /* Generate a type description DIE. */
25489
25490 static void
25491 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25492 enum debug_info_usage usage)
25493 {
25494 struct array_descr_info info;
25495
25496 if (type == NULL_TREE || type == error_mark_node)
25497 return;
25498
25499 if (flag_checking && type)
25500 verify_type (type);
25501
25502 if (TYPE_NAME (type) != NULL_TREE
25503 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25504 && is_redundant_typedef (TYPE_NAME (type))
25505 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25506 /* The DECL of this type is a typedef we don't want to emit debug
25507 info for but we want debug info for its underlying typedef.
25508 This can happen for, e.g., the injected-class-name of a C++
25509 type. */
25510 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25511
25512 /* If TYPE is a typedef type variant, let's generate debug info
25513 for the parent typedef which TYPE is a type of. */
25514 if (typedef_variant_p (type))
25515 {
25516 if (TREE_ASM_WRITTEN (type))
25517 return;
25518
25519 tree name = TYPE_NAME (type);
25520 tree origin = decl_ultimate_origin (name);
25521 if (origin != NULL && origin != name)
25522 {
25523 gen_decl_die (origin, NULL, NULL, context_die);
25524 return;
25525 }
25526
25527 /* Prevent broken recursion; we can't hand off to the same type. */
25528 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25529
25530 /* Give typedefs the right scope. */
25531 context_die = scope_die_for (type, context_die);
25532
25533 TREE_ASM_WRITTEN (type) = 1;
25534
25535 gen_decl_die (name, NULL, NULL, context_die);
25536 return;
25537 }
25538
25539 /* If type is an anonymous tagged type named by a typedef, let's
25540 generate debug info for the typedef. */
25541 if (is_naming_typedef_decl (TYPE_NAME (type)))
25542 {
25543 /* Give typedefs the right scope. */
25544 context_die = scope_die_for (type, context_die);
25545
25546 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25547 return;
25548 }
25549
25550 if (lang_hooks.types.get_debug_type)
25551 {
25552 tree debug_type = lang_hooks.types.get_debug_type (type);
25553
25554 if (debug_type != NULL_TREE && debug_type != type)
25555 {
25556 gen_type_die_with_usage (debug_type, context_die, usage);
25557 return;
25558 }
25559 }
25560
25561 /* We are going to output a DIE to represent the unqualified version
25562 of this type (i.e. without any const or volatile qualifiers) so
25563 get the main variant (i.e. the unqualified version) of this type
25564 now. (Vectors and arrays are special because the debugging info is in the
25565 cloned type itself. Similarly function/method types can contain extra
25566 ref-qualification). */
25567 if (TREE_CODE (type) == FUNCTION_TYPE
25568 || TREE_CODE (type) == METHOD_TYPE)
25569 {
25570 /* For function/method types, can't use type_main_variant here,
25571 because that can have different ref-qualifiers for C++,
25572 but try to canonicalize. */
25573 tree main = TYPE_MAIN_VARIANT (type);
25574 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25575 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25576 && check_base_type (t, main)
25577 && check_lang_type (t, type))
25578 {
25579 type = t;
25580 break;
25581 }
25582 }
25583 else if (TREE_CODE (type) != VECTOR_TYPE
25584 && TREE_CODE (type) != ARRAY_TYPE)
25585 type = type_main_variant (type);
25586
25587 /* If this is an array type with hidden descriptor, handle it first. */
25588 if (!TREE_ASM_WRITTEN (type)
25589 && lang_hooks.types.get_array_descr_info)
25590 {
25591 memset (&info, 0, sizeof (info));
25592 if (lang_hooks.types.get_array_descr_info (type, &info))
25593 {
25594 /* Fortran sometimes emits array types with no dimension. */
25595 gcc_assert (info.ndimensions >= 0
25596 && (info.ndimensions
25597 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25598 gen_descr_array_type_die (type, &info, context_die);
25599 TREE_ASM_WRITTEN (type) = 1;
25600 return;
25601 }
25602 }
25603
25604 if (TREE_ASM_WRITTEN (type))
25605 {
25606 /* Variable-length types may be incomplete even if
25607 TREE_ASM_WRITTEN. For such types, fall through to
25608 gen_array_type_die() and possibly fill in
25609 DW_AT_{upper,lower}_bound attributes. */
25610 if ((TREE_CODE (type) != ARRAY_TYPE
25611 && TREE_CODE (type) != RECORD_TYPE
25612 && TREE_CODE (type) != UNION_TYPE
25613 && TREE_CODE (type) != QUAL_UNION_TYPE)
25614 || !variably_modified_type_p (type, NULL))
25615 return;
25616 }
25617
25618 switch (TREE_CODE (type))
25619 {
25620 case ERROR_MARK:
25621 break;
25622
25623 case POINTER_TYPE:
25624 case REFERENCE_TYPE:
25625 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25626 ensures that the gen_type_die recursion will terminate even if the
25627 type is recursive. Recursive types are possible in Ada. */
25628 /* ??? We could perhaps do this for all types before the switch
25629 statement. */
25630 TREE_ASM_WRITTEN (type) = 1;
25631
25632 /* For these types, all that is required is that we output a DIE (or a
25633 set of DIEs) to represent the "basis" type. */
25634 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25635 DINFO_USAGE_IND_USE);
25636 break;
25637
25638 case OFFSET_TYPE:
25639 /* This code is used for C++ pointer-to-data-member types.
25640 Output a description of the relevant class type. */
25641 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25642 DINFO_USAGE_IND_USE);
25643
25644 /* Output a description of the type of the object pointed to. */
25645 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25646 DINFO_USAGE_IND_USE);
25647
25648 /* Now output a DIE to represent this pointer-to-data-member type
25649 itself. */
25650 gen_ptr_to_mbr_type_die (type, context_die);
25651 break;
25652
25653 case FUNCTION_TYPE:
25654 /* Force out return type (in case it wasn't forced out already). */
25655 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25656 DINFO_USAGE_DIR_USE);
25657 gen_subroutine_type_die (type, context_die);
25658 break;
25659
25660 case METHOD_TYPE:
25661 /* Force out return type (in case it wasn't forced out already). */
25662 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25663 DINFO_USAGE_DIR_USE);
25664 gen_subroutine_type_die (type, context_die);
25665 break;
25666
25667 case ARRAY_TYPE:
25668 case VECTOR_TYPE:
25669 gen_array_type_die (type, context_die);
25670 break;
25671
25672 case ENUMERAL_TYPE:
25673 case RECORD_TYPE:
25674 case UNION_TYPE:
25675 case QUAL_UNION_TYPE:
25676 gen_tagged_type_die (type, context_die, usage);
25677 return;
25678
25679 case VOID_TYPE:
25680 case INTEGER_TYPE:
25681 case REAL_TYPE:
25682 case FIXED_POINT_TYPE:
25683 case COMPLEX_TYPE:
25684 case BOOLEAN_TYPE:
25685 /* No DIEs needed for fundamental types. */
25686 break;
25687
25688 case NULLPTR_TYPE:
25689 case LANG_TYPE:
25690 /* Just use DW_TAG_unspecified_type. */
25691 {
25692 dw_die_ref type_die = lookup_type_die (type);
25693 if (type_die == NULL)
25694 {
25695 tree name = TYPE_IDENTIFIER (type);
25696 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25697 type);
25698 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25699 equate_type_number_to_die (type, type_die);
25700 }
25701 }
25702 break;
25703
25704 default:
25705 if (is_cxx_auto (type))
25706 {
25707 tree name = TYPE_IDENTIFIER (type);
25708 dw_die_ref *die = (name == get_identifier ("auto")
25709 ? &auto_die : &decltype_auto_die);
25710 if (!*die)
25711 {
25712 *die = new_die (DW_TAG_unspecified_type,
25713 comp_unit_die (), NULL_TREE);
25714 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25715 }
25716 equate_type_number_to_die (type, *die);
25717 break;
25718 }
25719 gcc_unreachable ();
25720 }
25721
25722 TREE_ASM_WRITTEN (type) = 1;
25723 }
25724
25725 static void
25726 gen_type_die (tree type, dw_die_ref context_die)
25727 {
25728 if (type != error_mark_node)
25729 {
25730 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25731 if (flag_checking)
25732 {
25733 dw_die_ref die = lookup_type_die (type);
25734 if (die)
25735 check_die (die);
25736 }
25737 }
25738 }
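/* As a rough illustration of the recursion above (the type name below is
   purely hypothetical): for

       struct node { struct node *next; };

   gen_type_die_with_usage reaches the POINTER_TYPE of the NEXT field, marks
   it TREE_ASM_WRITTEN, and then recurses into struct node itself; when the
   recursion meets that pointer type again, the TREE_ASM_WRITTEN check at the
   top returns early, so even self-referential (or Ada-style recursive)
   types terminate.  */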
25739
25740 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25741 things which are local to the given block. */
25742
25743 static void
25744 gen_block_die (tree stmt, dw_die_ref context_die)
25745 {
25746 int must_output_die = 0;
25747 bool inlined_func;
25748
25749 /* Ignore blocks that are NULL. */
25750 if (stmt == NULL_TREE)
25751 return;
25752
25753 inlined_func = inlined_function_outer_scope_p (stmt);
25754
25755 /* If the block is one fragment of a non-contiguous block, do not
25756 process the variables, since they will have been done by the
25757 origin block. Do process subblocks. */
25758 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25759 {
25760 tree sub;
25761
25762 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25763 gen_block_die (sub, context_die);
25764
25765 return;
25766 }
25767
25768 /* Determine if we need to output any Dwarf DIEs at all to represent this
25769 block. */
25770 if (inlined_func)
25771 /* The outer scopes for inlinings *must* always be represented. We
25772 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25773 must_output_die = 1;
25774 else if (lookup_block_die (stmt))
25775 /* If we already have a DIE then it was filled early. Meanwhile
25776 we might have pruned all BLOCK_VARS as optimized out but we
25777 still want to generate high/low PC attributes so output it. */
25778 must_output_die = 1;
25779 else if (TREE_USED (stmt)
25780 || TREE_ASM_WRITTEN (stmt))
25781 {
25782 /* Determine if this block directly contains any "significant"
25783 local declarations which we will need to output DIEs for. */
25784 if (debug_info_level > DINFO_LEVEL_TERSE)
25785 {
25786 /* We are not in terse mode so any local declaration that
25787 is not ignored for debug purposes counts as being a
25788 "significant" one. */
25789 if (BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25790 must_output_die = 1;
25791 else
25792 for (tree var = BLOCK_VARS (stmt); var; var = DECL_CHAIN (var))
25793 if (!DECL_IGNORED_P (var))
25794 {
25795 must_output_die = 1;
25796 break;
25797 }
25798 }
25799 else if (!dwarf2out_ignore_block (stmt))
25800 must_output_die = 1;
25801 }
25802
25803 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25804 DIE for any block which contains no significant local declarations at
25805 all. Rather, in such cases we just call `decls_for_scope' so that any
25806 needed Dwarf info for any sub-blocks will get properly generated. Note
25807 that in terse mode, our definition of what constitutes a "significant"
25808 local declaration gets restricted to include only inlined function
25809 instances and local (nested) function definitions. */
25810 if (must_output_die)
25811 {
25812 if (inlined_func)
25813 gen_inlined_subroutine_die (stmt, context_die);
25814 else
25815 gen_lexical_block_die (stmt, context_die);
25816 }
25817 else
25818 decls_for_scope (stmt, context_die);
25819 }
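/* A hypothetical sketch of how MUST_OUTPUT_DIE ends up set: for

       void f (void) { { int x = 0; use (&x); } }

   at -g2 the inner BLOCK has a non-DECL_IGNORED_P local, so a
   DW_TAG_lexical_block DIE is emitted for it; at -g1 (terse) the same block
   would normally be skipped via dwarf2out_ignore_block and only
   decls_for_scope would run, unless it is the outer scope of an inlined
   function, which is always represented.  */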
25820
25821 /* Process variable DECL (or variable with origin ORIGIN) within
25822 block STMT and add it to CONTEXT_DIE. */
25823 static void
25824 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25825 {
25826 dw_die_ref die;
25827 tree decl_or_origin = decl ? decl : origin;
25828
25829 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25830 die = lookup_decl_die (decl_or_origin);
25831 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25832 {
25833 if (TYPE_DECL_IS_STUB (decl_or_origin))
25834 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25835 else
25836 die = lookup_decl_die (decl_or_origin);
25837 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25838 if (! die && ! early_dwarf)
25839 return;
25840 }
25841 else
25842 die = NULL;
25843
25844 /* Avoid creating DIEs for local typedefs and concrete static variables that
25845 will only be pruned later. */
25846 if ((origin || decl_ultimate_origin (decl))
25847 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25848 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25849 {
25850 origin = decl_ultimate_origin (decl_or_origin);
25851 if (decl && VAR_P (decl) && die != NULL)
25852 {
25853 die = lookup_decl_die (origin);
25854 if (die != NULL)
25855 equate_decl_number_to_die (decl, die);
25856 }
25857 return;
25858 }
25859
25860 if (die != NULL && die->die_parent == NULL)
25861 add_child_die (context_die, die);
25862 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25863 {
25864 if (early_dwarf)
25865 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25866 stmt, context_die);
25867 }
25868 else
25869 {
25870 if (decl && DECL_P (decl))
25871 {
25872 die = lookup_decl_die (decl);
25873
25874 /* Early created DIEs do not have a parent as the decls refer
25875 to the function as DECL_CONTEXT rather than the BLOCK. */
25876 if (die && die->die_parent == NULL)
25877 {
25878 gcc_assert (in_lto_p);
25879 add_child_die (context_die, die);
25880 }
25881 }
25882
25883 gen_decl_die (decl, origin, NULL, context_die);
25884 }
25885 }
25886
25887 /* Generate all of the decls declared within a given scope and (recursively)
25888 all of its sub-blocks. */
25889
25890 static void
25891 decls_for_scope (tree stmt, dw_die_ref context_die, bool recurse)
25892 {
25893 tree decl;
25894 unsigned int i;
25895 tree subblocks;
25896
25897 /* Ignore NULL blocks. */
25898 if (stmt == NULL_TREE)
25899 return;
25900
25901 /* Output the DIEs to represent all of the data objects and typedefs
25902 declared directly within this block but not within any nested
25903 sub-blocks. Also, nested function and tag DIEs have been
25904 generated with a parent of NULL; fix that up now. We don't
25905 have to do this if we're at -g1. */
25906 if (debug_info_level > DINFO_LEVEL_TERSE)
25907 {
25908 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25909 process_scope_var (stmt, decl, NULL_TREE, context_die);
25910 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with an abstract
25911 origin; avoid doing this twice, as we have no good way to see
25912 whether we've done it once already. */
25913 if (! early_dwarf)
25914 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25915 {
25916 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25917 if (decl == current_function_decl)
25918 /* Ignore declarations of the current function: although they
25919 are declarations, gen_subprogram_die would treat them as
25920 definitions again, because they are equal to
25921 current_function_decl, and would recurse endlessly. */;
25922 else if (TREE_CODE (decl) == FUNCTION_DECL)
25923 process_scope_var (stmt, decl, NULL_TREE, context_die);
25924 else
25925 process_scope_var (stmt, NULL_TREE, decl, context_die);
25926 }
25927 }
25928
25929 /* Even if we're at -g1, we need to process the subblocks in order to get
25930 inlined call information. */
25931
25932 /* Output the DIEs to represent all sub-blocks (and the items declared
25933 therein) of this block. */
25934 if (recurse)
25935 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25936 subblocks != NULL;
25937 subblocks = BLOCK_CHAIN (subblocks))
25938 gen_block_die (subblocks, context_die);
25939 }
25940
25941 /* Is this a typedef we can avoid emitting? */
25942
25943 static bool
25944 is_redundant_typedef (const_tree decl)
25945 {
25946 if (TYPE_DECL_IS_STUB (decl))
25947 return true;
25948
25949 if (DECL_ARTIFICIAL (decl)
25950 && DECL_CONTEXT (decl)
25951 && is_tagged_type (DECL_CONTEXT (decl))
25952 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25953 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25954 /* Also ignore the artificial member typedef for the class name. */
25955 return true;
25956
25957 return false;
25958 }
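/* For instance (a sketch, not an exhaustive list): for a C++ class

       struct S { };

   the front end creates an artificial member TYPE_DECL named S inside S
   (the injected-class-name); the second test above matches it, so no
   separate DW_TAG_typedef is emitted for it.  */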
25959
25960 /* Return TRUE if DECL is a typedef that names a type for linkage
25961 purposes. This kind of typedef is produced by the C++ FE for
25962 constructs like:
25963
25964 typedef struct {...} foo;
25965
25966 In that case, there is no typedef variant type produced for foo.
25967 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25968 struct type. */
25969
25970 static bool
25971 is_naming_typedef_decl (const_tree decl)
25972 {
25973 if (decl == NULL_TREE
25974 || TREE_CODE (decl) != TYPE_DECL
25975 || DECL_NAMELESS (decl)
25976 || !is_tagged_type (TREE_TYPE (decl))
25977 || DECL_IS_BUILTIN (decl)
25978 || is_redundant_typedef (decl)
25979 /* It looks like Ada produces TYPE_DECLs that are very similar
25980 to C++ naming typedefs but that have different
25981 semantics. Let's be specific to C++ for now. */
25982 || !is_cxx (decl))
25983 return FALSE;
25984
25985 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25986 && TYPE_NAME (TREE_TYPE (decl)) == decl
25987 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25988 != TYPE_NAME (TREE_TYPE (decl))));
25989 }
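/* Contrast, as a rough example: for

       typedef struct { int i; } foo;

   the anonymous struct has TYPE_NAME equal to the foo TYPE_DECL and no
   DECL_ORIGINAL_TYPE, so this predicate is presumably true; whereas for

       typedef struct bar { int i; } foo;

   the tag bar already names the type, foo gets an ordinary typedef
   variant, and the predicate is false.  */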
25990
25991 /* Looks up the DIE for a context. */
25992
25993 static inline dw_die_ref
25994 lookup_context_die (tree context)
25995 {
25996 if (context)
25997 {
25998 /* Find die that represents this context. */
25999 if (TYPE_P (context))
26000 {
26001 context = TYPE_MAIN_VARIANT (context);
26002 dw_die_ref ctx = lookup_type_die (context);
26003 if (!ctx)
26004 return NULL;
26005 return strip_naming_typedef (context, ctx);
26006 }
26007 else
26008 return lookup_decl_die (context);
26009 }
26010 return comp_unit_die ();
26011 }
26012
26013 /* Returns the DIE for a context. */
26014
26015 static inline dw_die_ref
26016 get_context_die (tree context)
26017 {
26018 if (context)
26019 {
26020 /* Find die that represents this context. */
26021 if (TYPE_P (context))
26022 {
26023 context = TYPE_MAIN_VARIANT (context);
26024 return strip_naming_typedef (context, force_type_die (context));
26025 }
26026 else
26027 return force_decl_die (context);
26028 }
26029 return comp_unit_die ();
26030 }
26031
26032 /* Returns the DIE for decl. A DIE will always be returned. */
26033
26034 static dw_die_ref
26035 force_decl_die (tree decl)
26036 {
26037 dw_die_ref decl_die;
26038 unsigned saved_external_flag;
26039 tree save_fn = NULL_TREE;
26040 decl_die = lookup_decl_die (decl);
26041 if (!decl_die)
26042 {
26043 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
26044
26045 decl_die = lookup_decl_die (decl);
26046 if (decl_die)
26047 return decl_die;
26048
26049 switch (TREE_CODE (decl))
26050 {
26051 case FUNCTION_DECL:
26052 /* Clear current_function_decl, so that gen_subprogram_die thinks
26053 that this is a declaration. At this point, we just want to force
26054 a declaration DIE. */
26055 save_fn = current_function_decl;
26056 current_function_decl = NULL_TREE;
26057 gen_subprogram_die (decl, context_die);
26058 current_function_decl = save_fn;
26059 break;
26060
26061 case VAR_DECL:
26062 /* Set the external flag to force a declaration DIE. Restore it
26063 after the gen_decl_die call. */
26064 saved_external_flag = DECL_EXTERNAL (decl);
26065 DECL_EXTERNAL (decl) = 1;
26066 gen_decl_die (decl, NULL, NULL, context_die);
26067 DECL_EXTERNAL (decl) = saved_external_flag;
26068 break;
26069
26070 case NAMESPACE_DECL:
26071 if (dwarf_version >= 3 || !dwarf_strict)
26072 dwarf2out_decl (decl);
26073 else
26074 /* DWARF2 has neither DW_TAG_module nor DW_TAG_namespace. */
26075 decl_die = comp_unit_die ();
26076 break;
26077
26078 case TRANSLATION_UNIT_DECL:
26079 decl_die = comp_unit_die ();
26080 break;
26081
26082 default:
26083 gcc_unreachable ();
26084 }
26085
26086 /* We should be able to find the DIE now. */
26087 if (!decl_die)
26088 decl_die = lookup_decl_die (decl);
26089 gcc_assert (decl_die);
26090 }
26091
26092 return decl_die;
26093 }
26094
26095 /* Returns the DIE for TYPE, which must not be a base type. A DIE is
26096 always returned. */
26097
26098 static dw_die_ref
26099 force_type_die (tree type)
26100 {
26101 dw_die_ref type_die;
26102
26103 type_die = lookup_type_die (type);
26104 if (!type_die)
26105 {
26106 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
26107
26108 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
26109 false, context_die);
26110 gcc_assert (type_die);
26111 }
26112 return type_die;
26113 }
26114
26115 /* Force out any required namespaces to be able to output DECL,
26116 and return the new context_die for it, if it's changed. */
26117
26118 static dw_die_ref
26119 setup_namespace_context (tree thing, dw_die_ref context_die)
26120 {
26121 tree context = (DECL_P (thing)
26122 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
26123 if (context && TREE_CODE (context) == NAMESPACE_DECL)
26124 /* Force out the namespace. */
26125 context_die = force_decl_die (context);
26126
26127 return context_die;
26128 }
26129
26130 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
26131 type) within its namespace, if appropriate.
26132
26133 For compatibility with older debuggers, namespace DIEs only contain
26134 declarations; all definitions are emitted at CU scope, with
26135 DW_AT_specification pointing to the declaration (like with class
26136 members). */
26137
26138 static dw_die_ref
26139 declare_in_namespace (tree thing, dw_die_ref context_die)
26140 {
26141 dw_die_ref ns_context;
26142
26143 if (debug_info_level <= DINFO_LEVEL_TERSE)
26144 return context_die;
26145
26146 /* External declarations in the local scope only need to be emitted
26147 once, not once in the namespace and once in the scope.
26148
26149 This avoids declaring the `extern' below in the
26150 namespace DIE as well as in the innermost scope:
26151
26152 namespace S
26153 {
26154 int i=5;
26155 int foo()
26156 {
26157 int i=8;
26158 extern int i;
26159 return i;
26160 }
26161 }
26162 */
26163 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26164 return context_die;
26165
26166 /* If this decl is from an inlined function, then don't try to emit it in its
26167 namespace, as we will get confused. It would have already been emitted
26168 when the abstract instance of the inline function was emitted anyway. */
26169 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26170 return context_die;
26171
26172 ns_context = setup_namespace_context (thing, context_die);
26173
26174 if (ns_context != context_die)
26175 {
26176 if (is_fortran () || is_dlang ())
26177 return ns_context;
26178 if (DECL_P (thing))
26179 gen_decl_die (thing, NULL, NULL, ns_context);
26180 else
26181 gen_type_die (thing, ns_context);
26182 }
26183 return context_die;
26184 }
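/* A rough sketch of the resulting layout for the namespace S example in the
   comment above (attributes abbreviated; the exact set depends on language
   and debug level):

       DW_TAG_namespace "S"
         DW_TAG_variable "i"        <- declaration only
       ...
       DW_TAG_variable              <- definition at CU scope
         DW_AT_specification -> the declaration of "i" above

   This only illustrates the declaration-in-namespace versus
   definition-at-CU-scope split described earlier.  */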
26185
26186 /* Generate a DIE for a namespace or namespace alias. */
26187
26188 static void
26189 gen_namespace_die (tree decl, dw_die_ref context_die)
26190 {
26191 dw_die_ref namespace_die;
26192
26193 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26194 they are an alias of. */
26195 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26196 {
26197 /* Output a real namespace or module. */
26198 context_die = setup_namespace_context (decl, comp_unit_die ());
26199 namespace_die = new_die (is_fortran () || is_dlang ()
26200 ? DW_TAG_module : DW_TAG_namespace,
26201 context_die, decl);
26202 /* For Fortran modules defined in a different CU, don't add src coords. */
26203 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26204 {
26205 const char *name = dwarf2_name (decl, 0);
26206 if (name)
26207 add_name_attribute (namespace_die, name);
26208 }
26209 else
26210 add_name_and_src_coords_attributes (namespace_die, decl);
26211 if (DECL_EXTERNAL (decl))
26212 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26213 equate_decl_number_to_die (decl, namespace_die);
26214 }
26215 else
26216 {
26217 /* Output a namespace alias. */
26218
26219 /* Force out the namespace we are an alias of, if necessary. */
26220 dw_die_ref origin_die
26221 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26222
26223 if (DECL_FILE_SCOPE_P (decl)
26224 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26225 context_die = setup_namespace_context (decl, comp_unit_die ());
26226 /* Now create the namespace alias DIE. */
26227 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26228 add_name_and_src_coords_attributes (namespace_die, decl);
26229 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26230 equate_decl_number_to_die (decl, namespace_die);
26231 }
26232 if ((dwarf_version >= 5 || !dwarf_strict)
26233 && lang_hooks.decls.decl_dwarf_attribute (decl,
26234 DW_AT_export_symbols) == 1)
26235 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26236
26237 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26238 if (want_pubnames ())
26239 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26240 }
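/* For example (output sketched approximately, assuming C++ and -g2):

       namespace B { int v; }
       namespace A = B;

   the first branch above emits a DW_TAG_namespace DIE for B, while the
   alias A takes the second branch and becomes roughly

       DW_TAG_imported_declaration
         DW_AT_name "A"
         DW_AT_import -> DW_TAG_namespace "B"  */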
26241
26242 /* Generate Dwarf debug information for a decl described by DECL.
26243 The return value is currently only meaningful for PARM_DECLs;
26244 for all other decls it returns NULL.
26245
26246 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26247 It can be NULL otherwise. */
26248
26249 static dw_die_ref
26250 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26251 dw_die_ref context_die)
26252 {
26253 tree decl_or_origin = decl ? decl : origin;
26254 tree class_origin = NULL, ultimate_origin;
26255
26256 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26257 return NULL;
26258
26259 switch (TREE_CODE (decl_or_origin))
26260 {
26261 case ERROR_MARK:
26262 break;
26263
26264 case CONST_DECL:
26265 if (!is_fortran () && !is_ada () && !is_dlang ())
26266 {
26267 /* The individual enumerators of an enum type get output when we output
26268 the Dwarf representation of the relevant enum type itself. */
26269 break;
26270 }
26271
26272 /* Emit its type. */
26273 gen_type_die (TREE_TYPE (decl), context_die);
26274
26275 /* And its containing namespace. */
26276 context_die = declare_in_namespace (decl, context_die);
26277
26278 gen_const_die (decl, context_die);
26279 break;
26280
26281 case FUNCTION_DECL:
26282 #if 0
26283 /* FIXME */
26284 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26285 on local redeclarations of global functions. That seems broken. */
26286 if (current_function_decl != decl)
26287 /* This is only a declaration. */;
26288 #endif
26289
26290 /* We should have abstract copies already and should not generate
26291 stray type DIEs in late LTO dumping. */
26292 if (! early_dwarf)
26293 ;
26294
26295 /* If we're emitting a clone, emit info for the abstract instance. */
26296 else if (origin || DECL_ORIGIN (decl) != decl)
26297 dwarf2out_abstract_function (origin
26298 ? DECL_ORIGIN (origin)
26299 : DECL_ABSTRACT_ORIGIN (decl));
26300
26301 /* If we're emitting a possibly inlined function emit it as
26302 abstract instance. */
26303 else if (cgraph_function_possibly_inlined_p (decl)
26304 && ! DECL_ABSTRACT_P (decl)
26305 && ! class_or_namespace_scope_p (context_die)
26306 /* dwarf2out_abstract_function won't emit a die if this is just
26307 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26308 that case, because that works only if we have a die. */
26309 && DECL_INITIAL (decl) != NULL_TREE)
26310 dwarf2out_abstract_function (decl);
26311
26312 /* Otherwise we're emitting the primary DIE for this decl. */
26313 else if (debug_info_level > DINFO_LEVEL_TERSE)
26314 {
26315 /* Before we describe the FUNCTION_DECL itself, make sure that we
26316 have its containing type. */
26317 if (!origin)
26318 origin = decl_class_context (decl);
26319 if (origin != NULL_TREE)
26320 gen_type_die (origin, context_die);
26321
26322 /* And its return type. */
26323 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26324
26325 /* And its virtual context. */
26326 if (DECL_VINDEX (decl) != NULL_TREE)
26327 gen_type_die (DECL_CONTEXT (decl), context_die);
26328
26329 /* Make sure we have a member DIE for decl. */
26330 if (origin != NULL_TREE)
26331 gen_type_die_for_member (origin, decl, context_die);
26332
26333 /* And its containing namespace. */
26334 context_die = declare_in_namespace (decl, context_die);
26335 }
26336
26337 /* Now output a DIE to represent the function itself. */
26338 if (decl)
26339 gen_subprogram_die (decl, context_die);
26340 break;
26341
26342 case TYPE_DECL:
26343 /* If we are in terse mode, don't generate any DIEs to represent any
26344 actual typedefs. */
26345 if (debug_info_level <= DINFO_LEVEL_TERSE)
26346 break;
26347
26348 /* In the special case of a TYPE_DECL node representing the declaration
26349 of some type tag, if the given TYPE_DECL is marked as having been
26350 instantiated from some other (original) TYPE_DECL node (e.g. one which
26351 was generated within the original definition of an inline function) we
26352 used to generate a special (abbreviated) DW_TAG_structure_type,
26353 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26354 should actually be referencing those DIEs, as variable DIEs with that
26355 type would already be emitted in the abstract origin, so it was always
26356 removed during unused type pruning. Don't add anything in this
26357 case. */
26358 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26359 break;
26360
26361 if (is_redundant_typedef (decl))
26362 gen_type_die (TREE_TYPE (decl), context_die);
26363 else
26364 /* Output a DIE to represent the typedef itself. */
26365 gen_typedef_die (decl, context_die);
26366 break;
26367
26368 case LABEL_DECL:
26369 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26370 gen_label_die (decl, context_die);
26371 break;
26372
26373 case VAR_DECL:
26374 case RESULT_DECL:
26375 /* If we are in terse mode, don't generate any DIEs to represent any
26376 variable declarations or definitions unless the variable is external. */
26377 if (debug_info_level < DINFO_LEVEL_TERSE
26378 || (debug_info_level == DINFO_LEVEL_TERSE
26379 && !TREE_PUBLIC (decl_or_origin)))
26380 break;
26381
26382 if (debug_info_level > DINFO_LEVEL_TERSE)
26383 {
26384 /* Avoid generating stray type DIEs during late dwarf dumping.
26385 All types have been dumped early. */
26386 if (early_dwarf
26387 /* ??? But in LTRANS we cannot annotate early created variably
26388 modified type DIEs without copying them and adjusting all
26389 references to them. Dump them again as happens for inlining
26390 which copies both the decl and the types. */
26391 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26392 in VLA bound information for example. */
26393 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26394 current_function_decl)))
26395 {
26396 /* Output any DIEs that are needed to specify the type of this data
26397 object. */
26398 if (decl_by_reference_p (decl_or_origin))
26399 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26400 else
26401 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26402 }
26403
26404 if (early_dwarf)
26405 {
26406 /* And its containing type. */
26407 class_origin = decl_class_context (decl_or_origin);
26408 if (class_origin != NULL_TREE)
26409 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26410
26411 /* And its containing namespace. */
26412 context_die = declare_in_namespace (decl_or_origin, context_die);
26413 }
26414 }
26415
26416 /* Now output the DIE to represent the data object itself. This gets
26417 complicated because of the possibility that the VAR_DECL really
26418 represents an inlined instance of a formal parameter for an inline
26419 function. */
26420 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26421 if (ultimate_origin != NULL_TREE
26422 && TREE_CODE (ultimate_origin) == PARM_DECL)
26423 gen_formal_parameter_die (decl, origin,
26424 true /* Emit name attribute. */,
26425 context_die);
26426 else
26427 gen_variable_die (decl, origin, context_die);
26428 break;
26429
26430 case FIELD_DECL:
26431 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26432 /* Ignore the nameless fields that are used to skip bits but handle C++
26433 anonymous unions and structs. */
26434 if (DECL_NAME (decl) != NULL_TREE
26435 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26436 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26437 {
26438 gen_type_die (member_declared_type (decl), context_die);
26439 gen_field_die (decl, ctx, context_die);
26440 }
26441 break;
26442
26443 case PARM_DECL:
26444 /* Avoid generating stray type DIEs during late dwarf dumping.
26445 All types have been dumped early. */
26446 if (early_dwarf
26447 /* ??? But in LTRANS we cannot annotate early created variably
26448 modified type DIEs without copying them and adjusting all
26449 references to them. Dump them again as happens for inlining
26450 which copies both the decl and the types. */
26451 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26452 in VLA bound information for example. */
26453 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26454 current_function_decl)))
26455 {
26456 if (DECL_BY_REFERENCE (decl_or_origin))
26457 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26458 else
26459 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26460 }
26461 return gen_formal_parameter_die (decl, origin,
26462 true /* Emit name attribute. */,
26463 context_die);
26464
26465 case NAMESPACE_DECL:
26466 if (dwarf_version >= 3 || !dwarf_strict)
26467 gen_namespace_die (decl, context_die);
26468 break;
26469
26470 case IMPORTED_DECL:
26471 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26472 DECL_CONTEXT (decl), context_die);
26473 break;
26474
26475 case NAMELIST_DECL:
26476 gen_namelist_decl (DECL_NAME (decl), context_die,
26477 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26478 break;
26479
26480 default:
26481 /* Probably some frontend-internal decl. Assume we don't care. */
26482 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26483 break;
26484 }
26485
26486 return NULL;
26487 }
26488 \f
26489 /* Output initial debug information for global DECL. Called at the
26490 end of the parsing process.
26491
26492 This is the initial debug generation process. As such, the DIEs
26493 generated may be incomplete. A later debug generation pass
26494 (dwarf2out_late_global_decl) will augment the information generated
26495 in this pass (e.g., with complete location info). */
26496
26497 static void
26498 dwarf2out_early_global_decl (tree decl)
26499 {
26500 set_early_dwarf s;
26501
26502 /* gen_decl_die() will set DECL_ABSTRACT because
26503 cgraph_function_possibly_inlined_p() returns true. This in
26504 turn will cause DW_AT_inline attributes to be set.
26505
26506 This happens because at early dwarf generation, there is no
26507 cgraph information, causing cgraph_function_possibly_inlined_p()
26508 to return true. Trick cgraph_function_possibly_inlined_p()
26509 while we generate dwarf early. */
26510 bool save = symtab->global_info_ready;
26511 symtab->global_info_ready = true;
26512
26513 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26514 other DECLs and they can point to template types or other things
26515 that dwarf2out can't handle when done via dwarf2out_decl. */
26516 if (TREE_CODE (decl) != TYPE_DECL
26517 && TREE_CODE (decl) != PARM_DECL)
26518 {
26519 if (TREE_CODE (decl) == FUNCTION_DECL)
26520 {
26521 tree save_fndecl = current_function_decl;
26522
26523 /* For nested functions, make sure we have DIEs for the parents first
26524 so that all nested DIEs are generated at the proper scope in the
26525 first shot. */
26526 tree context = decl_function_context (decl);
26527 if (context != NULL)
26528 {
26529 dw_die_ref context_die = lookup_decl_die (context);
26530 current_function_decl = context;
26531
26532 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26533 enough so that it lands in its own context. This avoids type
26534 pruning issues later on. */
26535 if (context_die == NULL || is_declaration_die (context_die))
26536 dwarf2out_early_global_decl (context);
26537 }
26538
26539 /* Emit the abstract origin of a function first. This happens
26540 with C++ constructor clones, for example, and keeps
26541 dwarf2out_abstract_function happy, since it requires the early
26542 DIE of the abstract instance to be present. */
26543 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26544 dw_die_ref origin_die;
26545 if (origin != NULL
26546 /* Do not emit the DIE multiple times but make sure to
26547 process it fully here in case we just saw a declaration. */
26548 && ((origin_die = lookup_decl_die (origin)) == NULL
26549 || is_declaration_die (origin_die)))
26550 {
26551 current_function_decl = origin;
26552 dwarf2out_decl (origin);
26553 }
26554
26555 /* Emit the DIE for decl but avoid doing that multiple times. */
26556 dw_die_ref old_die;
26557 if ((old_die = lookup_decl_die (decl)) == NULL
26558 || is_declaration_die (old_die))
26559 {
26560 current_function_decl = decl;
26561 dwarf2out_decl (decl);
26562 }
26563
26564 current_function_decl = save_fndecl;
26565 }
26566 else
26567 dwarf2out_decl (decl);
26568 }
26569 symtab->global_info_ready = save;
26570 }
26571
26572 /* Return whether EXPR is an expression with the following pattern:
26573 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
26574
26575 static bool
26576 is_trivial_indirect_ref (tree expr)
26577 {
26578 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26579 return false;
26580
26581 tree nop = TREE_OPERAND (expr, 0);
26582 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26583 return false;
26584
26585 tree int_cst = TREE_OPERAND (nop, 0);
26586 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26587 }
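/* For illustration (the address constant is arbitrary): a DECL_VALUE_EXPR
   of the form

       *(int *) 0x1000        i.e. INDIRECT_REF (NOP_EXPR (INTEGER_CST))

   matches this predicate, whereas something like *(int *) &other_var does
   not, because the operand of the NOP_EXPR is not an INTEGER_CST.  */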
26588
26589 /* Output debug information for global decl DECL. Called from
26590 toplev.c after compilation proper has finished. */
26591
26592 static void
26593 dwarf2out_late_global_decl (tree decl)
26594 {
26595 /* Fill-in any location information we were unable to determine
26596 on the first pass. */
26597 if (VAR_P (decl))
26598 {
26599 dw_die_ref die = lookup_decl_die (decl);
26600
26601 /* We may have to generate full debug late for LTO in case debug
26602 was not enabled at compile-time or the target doesn't support
26603 the LTO early debug scheme. */
26604 if (! die && in_lto_p)
26605 dwarf2out_decl (decl);
26606 else if (die)
26607 {
26608 /* We get called via the symtab code invoking late_global_decl
26609 for symbols that are optimized out.
26610
26611 Do not add locations for those, except if they have a
26612 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26613 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26614 INDIRECT_REF expression, as this could generate relocations to
26615 text symbols in LTO object files, which is invalid. */
26616 varpool_node *node = varpool_node::get (decl);
26617 if ((! node || ! node->definition)
26618 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26619 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26620 tree_add_const_value_attribute_for_decl (die, decl);
26621 else
26622 add_location_or_const_value_attribute (die, decl, false);
26623 }
26624 }
26625 }
26626
26627 /* Output debug information for type decl DECL. Called from toplev.c
26628 and from language front ends (to record built-in types). */
26629 static void
26630 dwarf2out_type_decl (tree decl, int local)
26631 {
26632 if (!local)
26633 {
26634 set_early_dwarf s;
26635 dwarf2out_decl (decl);
26636 }
26637 }
26638
26639 /* Output debug information for imported module or decl DECL.
26640 NAME is the non-NULL name in the lexical block if the decl has been renamed.
26641 LEXICAL_BLOCK is the lexical block (whose TREE_CODE is BLOCK)
26642 that DECL belongs to.
26643 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
26644 static void
26645 dwarf2out_imported_module_or_decl_1 (tree decl,
26646 tree name,
26647 tree lexical_block,
26648 dw_die_ref lexical_block_die)
26649 {
26650 expanded_location xloc;
26651 dw_die_ref imported_die = NULL;
26652 dw_die_ref at_import_die;
26653
26654 if (TREE_CODE (decl) == IMPORTED_DECL)
26655 {
26656 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26657 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26658 gcc_assert (decl);
26659 }
26660 else
26661 xloc = expand_location (input_location);
26662
26663 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26664 {
26665 at_import_die = force_type_die (TREE_TYPE (decl));
26666 /* For namespace N { typedef void T; } using N::T; base_type_die
26667 returns NULL, but DW_TAG_imported_declaration requires
26668 the DW_AT_import attribute. Force creation of DW_TAG_typedef. */
26669 if (!at_import_die)
26670 {
26671 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26672 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26673 at_import_die = lookup_type_die (TREE_TYPE (decl));
26674 gcc_assert (at_import_die);
26675 }
26676 }
26677 else
26678 {
26679 at_import_die = lookup_decl_die (decl);
26680 if (!at_import_die)
26681 {
26682 /* If we're trying to avoid duplicate debug info, we may not have
26683 emitted the member decl for this field. Emit it now. */
26684 if (TREE_CODE (decl) == FIELD_DECL)
26685 {
26686 tree type = DECL_CONTEXT (decl);
26687
26688 if (TYPE_CONTEXT (type)
26689 && TYPE_P (TYPE_CONTEXT (type))
26690 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26691 DINFO_USAGE_DIR_USE))
26692 return;
26693 gen_type_die_for_member (type, decl,
26694 get_context_die (TYPE_CONTEXT (type)));
26695 }
26696 if (TREE_CODE (decl) == NAMELIST_DECL)
26697 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26698 get_context_die (DECL_CONTEXT (decl)),
26699 NULL_TREE);
26700 else
26701 at_import_die = force_decl_die (decl);
26702 }
26703 }
26704
26705 if (TREE_CODE (decl) == NAMESPACE_DECL)
26706 {
26707 if (dwarf_version >= 3 || !dwarf_strict)
26708 imported_die = new_die (DW_TAG_imported_module,
26709 lexical_block_die,
26710 lexical_block);
26711 else
26712 return;
26713 }
26714 else
26715 imported_die = new_die (DW_TAG_imported_declaration,
26716 lexical_block_die,
26717 lexical_block);
26718
26719 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26720 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26721 if (debug_column_info && xloc.column)
26722 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26723 if (name)
26724 add_AT_string (imported_die, DW_AT_name,
26725 IDENTIFIER_POINTER (name));
26726 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26727 }
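/* As an approximate example of the DIEs built above, for C++

       namespace N { typedef int T; }
       using namespace N;      // DW_TAG_imported_module
       using N::T;             // DW_TAG_imported_declaration

   each imported DIE gets DW_AT_decl_file/DW_AT_decl_line from the use
   site and DW_AT_import pointing at the DIE of the namespace or of the
   imported declaration, respectively.  */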
26728
26729 /* Output debug information for imported module or decl DECL.
26730 NAME is the non-NULL name in the context if the decl has been renamed.
26731 CHILD is true if decl is one of the renamed decls as part of
26732 importing the whole module.
26733 IMPLICIT is set if this hook is called for an implicit import
26734 such as inline namespace. */
26735
26736 static void
26737 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26738 bool child, bool implicit)
26739 {
26740 /* dw_die_ref at_import_die; */
26741 dw_die_ref scope_die;
26742
26743 if (debug_info_level <= DINFO_LEVEL_TERSE)
26744 return;
26745
26746 gcc_assert (decl);
26747
26748 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace should
26749 be enough. For DWARF4 and older, even if we emit DW_AT_export_symbols
26750 as an extension, add the implicit DW_TAG_imported_module anyway for
26751 the benefit of consumers unaware of DW_AT_export_symbols. */
26752 if (implicit
26753 && dwarf_version >= 5
26754 && lang_hooks.decls.decl_dwarf_attribute (decl,
26755 DW_AT_export_symbols) == 1)
26756 return;
26757
26758 set_early_dwarf s;
26759
26760 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need
26761 two DIEs: the DIE of the decl to reference, and the scope DIE. First,
26762 get the DIE for the decl itself.
26763
26764 /* Get the scope DIE for the decl's context. Use comp_unit_die for a global
26765 module or decl. If no DIE is found for a non-global, force a new one. */
26766 if (context
26767 && TYPE_P (context)
26768 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26769 return;
26770
26771 scope_die = get_context_die (context);
26772
26773 if (child)
26774 {
26775 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26776 there is nothing we can do here. */
26777 if (dwarf_version < 3 && dwarf_strict)
26778 return;
26779
26780 gcc_assert (scope_die->die_child);
26781 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26782 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26783 scope_die = scope_die->die_child;
26784 }
26785
26786 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26787 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26788 }
26789
26790 /* Output debug information for namelists. */
26791
26792 static dw_die_ref
26793 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26794 {
26795 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26796 tree value;
26797 unsigned i;
26798
26799 if (debug_info_level <= DINFO_LEVEL_TERSE)
26800 return NULL;
26801
26802 gcc_assert (scope_die != NULL);
26803 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26804 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26805
26806 /* If there are no item_decls, we have a nondefining namelist, e.g.
26807 with USE association; hence, set DW_AT_declaration. */
26808 if (item_decls == NULL_TREE)
26809 {
26810 add_AT_flag (nml_die, DW_AT_declaration, 1);
26811 return nml_die;
26812 }
26813
26814 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26815 {
26816 nml_item_ref_die = lookup_decl_die (value);
26817 if (!nml_item_ref_die)
26818 nml_item_ref_die = force_decl_die (value);
26819
26820 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26821 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26822 }
26823 return nml_die;
26824 }
26825
26826
26827 /* Write the debugging output for DECL. */
26828
26829 static void
26830 dwarf2out_decl (tree decl)
26831 {
26832 dw_die_ref context_die = comp_unit_die ();
26833
26834 switch (TREE_CODE (decl))
26835 {
26836 case ERROR_MARK:
26837 return;
26838
26839 case FUNCTION_DECL:
26840 /* If we're a nested function, initially use a parent of NULL; if we're
26841 a plain function, this will be fixed up in decls_for_scope. If
26842 we're a method, it will be ignored, since we already have a DIE.
26843 Avoid doing this late though since clones of class methods may
26844 otherwise end up in limbo and create type DIEs late. */
26845 if (early_dwarf
26846 && decl_function_context (decl)
26847 /* But if we're in terse mode, we don't care about scope. */
26848 && debug_info_level > DINFO_LEVEL_TERSE)
26849 context_die = NULL;
26850 break;
26851
26852 case VAR_DECL:
26853 /* For local statics look up the proper context DIE. */
26854 if (local_function_static (decl))
26855 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26856
26857 /* If we are in terse mode, don't generate any DIEs to represent any
26858 variable declarations or definitions unless the variable is external. */
26859 if (debug_info_level < DINFO_LEVEL_TERSE
26860 || (debug_info_level == DINFO_LEVEL_TERSE
26861 && !TREE_PUBLIC (decl)))
26862 return;
26863 break;
26864
26865 case CONST_DECL:
26866 if (debug_info_level <= DINFO_LEVEL_TERSE)
26867 return;
26868 if (!is_fortran () && !is_ada () && !is_dlang ())
26869 return;
26870 if (TREE_STATIC (decl) && decl_function_context (decl))
26871 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26872 break;
26873
26874 case NAMESPACE_DECL:
26875 case IMPORTED_DECL:
26876 if (debug_info_level <= DINFO_LEVEL_TERSE)
26877 return;
26878 if (lookup_decl_die (decl) != NULL)
26879 return;
26880 break;
26881
26882 case TYPE_DECL:
26883 /* Don't emit stubs for types unless they are needed by other DIEs. */
26884 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26885 return;
26886
26887 /* Don't bother trying to generate any DIEs to represent any of the
26888 normal built-in types for the language we are compiling. */
26889 if (DECL_IS_BUILTIN (decl))
26890 return;
26891
26892 /* If we are in terse mode, don't generate any DIEs for types. */
26893 if (debug_info_level <= DINFO_LEVEL_TERSE)
26894 return;
26895
26896 /* If we're a function-scope tag, initially use a parent of NULL;
26897 this will be fixed up in decls_for_scope. */
26898 if (decl_function_context (decl))
26899 context_die = NULL;
26900
26901 break;
26902
26903 case NAMELIST_DECL:
26904 break;
26905
26906 default:
26907 return;
26908 }
26909
26910 gen_decl_die (decl, NULL, NULL, context_die);
26911
26912 if (flag_checking)
26913 {
26914 dw_die_ref die = lookup_decl_die (decl);
26915 if (die)
26916 check_die (die);
26917 }
26918 }
26919
26920 /* Write the debugging output for DECL. */
26921
26922 static void
26923 dwarf2out_function_decl (tree decl)
26924 {
26925 dwarf2out_decl (decl);
26926 call_arg_locations = NULL;
26927 call_arg_loc_last = NULL;
26928 call_site_count = -1;
26929 tail_call_site_count = -1;
26930 decl_loc_table->empty ();
26931 cached_dw_loc_list_table->empty ();
26932 }
26933
26934 /* Output a marker (i.e. a label) for the beginning of the generated code for
26935 a lexical block. */
26936
26937 static void
26938 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26939 unsigned int blocknum)
26940 {
26941 switch_to_section (current_function_section ());
26942 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26943 }
26944
26945 /* Output a marker (i.e. a label) for the end of the generated code for a
26946 lexical block. */
26947
26948 static void
26949 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26950 {
26951 switch_to_section (current_function_section ());
26952 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26953 }
26954
26955 /* Returns nonzero if it is appropriate not to emit any debugging
26956 information for BLOCK, because it doesn't contain any instructions.
26957
26958 Don't allow this for blocks with nested functions or local classes
26959 as we would end up with orphans, and in the presence of scheduling
26960 we may end up calling them anyway. */
26961
26962 static bool
26963 dwarf2out_ignore_block (const_tree block)
26964 {
26965 tree decl;
26966 unsigned int i;
26967
26968 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26969 if (TREE_CODE (decl) == FUNCTION_DECL
26970 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26971 return 0;
26972 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26973 {
26974 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26975 if (TREE_CODE (decl) == FUNCTION_DECL
26976 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26977 return 0;
26978 }
26979
26980 return 1;
26981 }
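/* For example (a sketch; nested functions are a GNU C extension): a block
   whose only local is an unused scalar can be ignored, but a block that
   contains

       void g (void) { }

   must be kept, since dropping it would leave the DIE for the nested
   function g without a proper parent scope.  */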
26982
26983 /* Hash table routines for file_hash. */
26984
26985 bool
26986 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26987 {
26988 return filename_cmp (p1->filename, p2) == 0;
26989 }
26990
26991 hashval_t
26992 dwarf_file_hasher::hash (dwarf_file_data *p)
26993 {
26994 return htab_hash_string (p->filename);
26995 }
26996
26997 /* Look up FILE_NAME (in the list of filenames that we know about here in
26998 dwarf2out.c) and return its "index". The index of each (known) filename is
26999 just a unique number which is associated with only that one filename. We
27000 need such numbers for the sake of generating labels (in the .debug_sfnames
27001 section) and references to those file numbers (in the .debug_srcinfo
27002 and .debug_macinfo sections). If the filename given as an argument is not
27003 found in our current list, add it to the list and assign it the next
27004 available unique index number. */
27005
27006 static struct dwarf_file_data *
27007 lookup_filename (const char *file_name)
27008 {
27009 struct dwarf_file_data * created;
27010
27011 if (!file_name)
27012 return NULL;
27013
27014 if (!file_name[0])
27015 file_name = "<stdin>";
27016
27017 dwarf_file_data **slot
27018 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
27019 INSERT);
27020 if (*slot)
27021 return *slot;
27022
27023 created = ggc_alloc<dwarf_file_data> ();
27024 created->filename = file_name;
27025 created->emitted_number = 0;
27026 *slot = created;
27027 return created;
27028 }
27029
27030 /* If the assembler will construct the file table, then translate the compiler
27031 internal file table number into the assembler file table number, and emit
27032 a .file directive if we haven't already emitted one yet. The file table
27033 numbers are different because we prune debug info for unused variables and
27034 types, which may include filenames. */
27035
27036 static int
27037 maybe_emit_file (struct dwarf_file_data * fd)
27038 {
27039 if (! fd->emitted_number)
27040 {
27041 if (last_emitted_file)
27042 fd->emitted_number = last_emitted_file->emitted_number + 1;
27043 else
27044 fd->emitted_number = 1;
27045 last_emitted_file = fd;
27046
27047 if (output_asm_line_debug_info ())
27048 {
27049 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
27050 output_quoted_string (asm_out_file,
27051 remap_debug_filename (fd->filename));
27052 fputc ('\n', asm_out_file);
27053 }
27054 }
27055
27056 return fd->emitted_number;
27057 }
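/* A hypothetical example of the directive emitted above when the assembler
   builds the line table: the first lookup of "foo.c" gets emitted_number 1
   and produces roughly

       .file 1 "foo.c"

   (the number is the compiler-side count of emitted files, not the
   front end's internal file index).  */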
27058
27059 /* Schedule generation of a DW_AT_const_value attribute to DIE.
27060 That generation should happen after function debug info has been
27061 generated. The value of the attribute is the constant value of ARG. */
27062
27063 static void
27064 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
27065 {
27066 die_arg_entry entry;
27067
27068 if (!die || !arg)
27069 return;
27070
27071 gcc_assert (early_dwarf);
27072
27073 if (!tmpl_value_parm_die_table)
27074 vec_alloc (tmpl_value_parm_die_table, 32);
27075
27076 entry.die = die;
27077 entry.arg = arg;
27078 vec_safe_push (tmpl_value_parm_die_table, entry);
27079 }
27080
27081 /* Return TRUE if T is an instance of a generic type, FALSE
27082 otherwise. */
27083
27084 static bool
27085 generic_type_p (tree t)
27086 {
27087 if (t == NULL_TREE || !TYPE_P (t))
27088 return false;
27089 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
27090 }
27091
27092 /* Schedule the generation of the generic parameter dies for the
27093 instance of generic type T. The proper generation itself is later
27094 done by gen_scheduled_generic_parms_dies. */
27095
27096 static void
27097 schedule_generic_params_dies_gen (tree t)
27098 {
27099 if (!generic_type_p (t))
27100 return;
27101
27102 gcc_assert (early_dwarf);
27103
27104 if (!generic_type_instances)
27105 vec_alloc (generic_type_instances, 256);
27106
27107 vec_safe_push (generic_type_instances, t);
27108 }
27109
27110 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
27111 by append_entry_to_tmpl_value_parm_die_table. This function must
27112 be called after function DIEs have been generated. */
27113
27114 static void
27115 gen_remaining_tmpl_value_param_die_attribute (void)
27116 {
27117 if (tmpl_value_parm_die_table)
27118 {
27119 unsigned i, j;
27120 die_arg_entry *e;
27121
27122 /* We do this in two phases - first get the cases we can
27123 handle during early-finish, preserving those we cannot
27124 (containing symbolic constants where we don't yet know
27125 whether we are going to output the referenced symbols).
27126 For those we try again at late-finish. */
27127 j = 0;
27128 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
27129 {
27130 if (!e->die->removed
27131 && !tree_add_const_value_attribute (e->die, e->arg))
27132 {
27133 dw_loc_descr_ref loc = NULL;
27134 if (! early_dwarf
27135 && (dwarf_version >= 5 || !dwarf_strict))
27136 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
27137 if (loc)
27138 add_AT_loc (e->die, DW_AT_location, loc);
27139 else
27140 (*tmpl_value_parm_die_table)[j++] = *e;
27141 }
27142 }
27143 tmpl_value_parm_die_table->truncate (j);
27144 }
27145 }
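/* A rough C++ example of what gets scheduled here (names hypothetical):

       template <int N> struct A { };
       A<3> a;

   the DW_TAG_template_value_parameter DIE for N was queued early with
   ARG equal to 3; tree_add_const_value_attribute then attaches
   DW_AT_const_value 3. If ARG instead referred to a symbol whose output
   is not yet decided, the entry is kept and retried at late finish,
   possibly as a DW_AT_location.  */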
27146
27147 /* Generate generic parameters DIEs for instances of generic types
27148 that have been previously scheduled by
27149 schedule_generic_params_dies_gen. This function must be called
27150 after all the types of the CU have been laid out. */
27151
27152 static void
27153 gen_scheduled_generic_parms_dies (void)
27154 {
27155 unsigned i;
27156 tree t;
27157
27158 if (!generic_type_instances)
27159 return;
27160
27161 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
27162 if (COMPLETE_TYPE_P (t))
27163 gen_generic_params_dies (t);
27164
27165 generic_type_instances = NULL;
27166 }
27167
27168
27169 /* Replace DW_AT_name for DECL with NAME. */
27170
27171 static void
27172 dwarf2out_set_name (tree decl, tree name)
27173 {
27174 dw_die_ref die;
27175 dw_attr_node *attr;
27176 const char *dname;
27177
27178 die = TYPE_SYMTAB_DIE (decl);
27179 if (!die)
27180 return;
27181
27182 dname = dwarf2_name (name, 0);
27183 if (!dname)
27184 return;
27185
27186 attr = get_AT (die, DW_AT_name);
27187 if (attr)
27188 {
27189 struct indirect_string_node *node;
27190
27191 node = find_AT_string (dname);
27192 /* Replace the string. */
27193 attr->dw_attr_val.v.val_str = node;
27194 }
27195
27196 else
27197 add_name_attribute (die, dname);
27198 }
27199
27200 /* True if before or during processing of the first function being emitted. */
27201 static bool in_first_function_p = true;
27202 /* True if the loc_note seen during a dwarf2out_var_location call might
27203 still be before the first real instruction, at an address equal to .Ltext0. */
27204 static bool maybe_at_text_label_p = true;
27205 /* One above the highest N where the .LVLN label might equal the .Ltext0 label. */
27206 static unsigned int first_loclabel_num_not_at_text_label;
27207
27208 /* Look ahead for a real insn, or for a begin stmt marker. */
27209
27210 static rtx_insn *
27211 dwarf2out_next_real_insn (rtx_insn *loc_note)
27212 {
27213 rtx_insn *next_real = NEXT_INSN (loc_note);
27214
27215 while (next_real)
27216 if (INSN_P (next_real))
27217 break;
27218 else
27219 next_real = NEXT_INSN (next_real);
27220
27221 return next_real;
27222 }
27223
27224 /* Called by the final INSN scan whenever we see a var location. We
27225 use it to drop labels in the right places, and throw the location in
27226 our lookup table. */
27227
27228 static void
27229 dwarf2out_var_location (rtx_insn *loc_note)
27230 {
27231 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27232 struct var_loc_node *newloc;
27233 rtx_insn *next_real, *next_note;
27234 rtx_insn *call_insn = NULL;
27235 static const char *last_label;
27236 static const char *last_postcall_label;
27237 static bool last_in_cold_section_p;
27238 static rtx_insn *expected_next_loc_note;
27239 tree decl;
27240 bool var_loc_p;
27241 var_loc_view view = 0;
27242
27243 if (!NOTE_P (loc_note))
27244 {
27245 if (CALL_P (loc_note))
27246 {
27247 maybe_reset_location_view (loc_note, cur_line_info_table);
27248 call_site_count++;
27249 if (SIBLING_CALL_P (loc_note))
27250 tail_call_site_count++;
27251 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27252 {
27253 call_insn = loc_note;
27254 loc_note = NULL;
27255 var_loc_p = false;
27256
27257 next_real = dwarf2out_next_real_insn (call_insn);
27258 next_note = NULL;
27259 cached_next_real_insn = NULL;
27260 goto create_label;
27261 }
27262 if (optimize == 0 && !flag_var_tracking)
27263 {
27264 /* When the var-tracking pass is not running, there is no note
27265 for indirect calls whose target is compile-time known. In this
27266 case, process such calls specifically so that we generate call
27267 sites for them anyway. */
27268 rtx x = PATTERN (loc_note);
27269 if (GET_CODE (x) == PARALLEL)
27270 x = XVECEXP (x, 0, 0);
27271 if (GET_CODE (x) == SET)
27272 x = SET_SRC (x);
27273 if (GET_CODE (x) == CALL)
27274 x = XEXP (x, 0);
27275 if (!MEM_P (x)
27276 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27277 || !SYMBOL_REF_DECL (XEXP (x, 0))
27278 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27279 != FUNCTION_DECL))
27280 {
27281 call_insn = loc_note;
27282 loc_note = NULL;
27283 var_loc_p = false;
27284
27285 next_real = dwarf2out_next_real_insn (call_insn);
27286 next_note = NULL;
27287 cached_next_real_insn = NULL;
27288 goto create_label;
27289 }
27290 }
27291 }
27292 else if (!debug_variable_location_views)
27293 gcc_unreachable ();
27294 else
27295 maybe_reset_location_view (loc_note, cur_line_info_table);
27296
27297 return;
27298 }
27299
27300 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27301 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27302 return;
27303
27304 /* Optimize processing a large consecutive sequence of location
27305 notes so we don't spend too much time in next_real_insn. If the
27306 next insn is another location note, remember the next_real_insn
27307 calculation for next time. */
27308 next_real = cached_next_real_insn;
27309 if (next_real)
27310 {
27311 if (expected_next_loc_note != loc_note)
27312 next_real = NULL;
27313 }
27314
27315 next_note = NEXT_INSN (loc_note);
27316 if (! next_note
27317 || next_note->deleted ()
27318 || ! NOTE_P (next_note)
27319 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27320 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27321 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27322 next_note = NULL;
27323
27324 if (! next_real)
27325 next_real = dwarf2out_next_real_insn (loc_note);
27326
27327 if (next_note)
27328 {
27329 expected_next_loc_note = next_note;
27330 cached_next_real_insn = next_real;
27331 }
27332 else
27333 cached_next_real_insn = NULL;
27334
27335 /* If there are no instructions which would be affected by this note,
27336 don't do anything. */
27337 if (var_loc_p
27338 && next_real == NULL_RTX
27339 && !NOTE_DURING_CALL_P (loc_note))
27340 return;
27341
27342 create_label:
27343
27344 if (next_real == NULL_RTX)
27345 next_real = get_last_insn ();
27346
27347 /* If there were any real insns between the note we processed last time
27348 and this note (or if it is the first note), clear
27349 last_{,postcall_}label so that they are not reused this time. */
27350 if (last_var_location_insn == NULL_RTX
27351 || last_var_location_insn != next_real
27352 || last_in_cold_section_p != in_cold_section_p)
27353 {
27354 last_label = NULL;
27355 last_postcall_label = NULL;
27356 }
27357
27358 if (var_loc_p)
27359 {
27360 const char *label
27361 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27362 view = cur_line_info_table->view;
27363 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27364 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27365 if (newloc == NULL)
27366 return;
27367 }
27368 else
27369 {
27370 decl = NULL_TREE;
27371 newloc = NULL;
27372 }
27373
27374 /* If there were no real insns between the note we processed last time
27375 and this note, use the label we emitted last time. Otherwise
27376 create a new label and emit it. */
27377 if (last_label == NULL)
27378 {
27379 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27380 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27381 loclabel_num++;
27382 last_label = ggc_strdup (loclabel);
27383 /* See if loclabel might be equal to .Ltext0. If yes,
27384 bump first_loclabel_num_not_at_text_label. */
27385 if (!have_multiple_function_sections
27386 && in_first_function_p
27387 && maybe_at_text_label_p)
27388 {
27389 static rtx_insn *last_start;
27390 rtx_insn *insn;
27391 for (insn = loc_note; insn; insn = previous_insn (insn))
27392 if (insn == last_start)
27393 break;
27394 else if (!NONDEBUG_INSN_P (insn))
27395 continue;
27396 else
27397 {
27398 rtx body = PATTERN (insn);
27399 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27400 continue;
27401 /* Inline asm could occupy zero bytes. */
27402 else if (GET_CODE (body) == ASM_INPUT
27403 || asm_noperands (body) >= 0)
27404 continue;
27405 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27406 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27407 continue;
27408 #endif
27409 else
27410 {
27411 /* Assume insn has non-zero length. */
27412 maybe_at_text_label_p = false;
27413 break;
27414 }
27415 }
27416 if (maybe_at_text_label_p)
27417 {
27418 last_start = loc_note;
27419 first_loclabel_num_not_at_text_label = loclabel_num;
27420 }
27421 }
27422 }
27423
27424 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27425 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27426
27427 if (!var_loc_p)
27428 {
27429 struct call_arg_loc_node *ca_loc
27430 = ggc_cleared_alloc<call_arg_loc_node> ();
27431 rtx_insn *prev = call_insn;
27432
27433 ca_loc->call_arg_loc_note
27434 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27435 ca_loc->next = NULL;
27436 ca_loc->label = last_label;
27437 gcc_assert (prev
27438 && (CALL_P (prev)
27439 || (NONJUMP_INSN_P (prev)
27440 && GET_CODE (PATTERN (prev)) == SEQUENCE
27441 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27442 if (!CALL_P (prev))
27443 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27444 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27445
27446 /* Look for a SYMBOL_REF in the "prev" instruction. */
27447 rtx x = get_call_rtx_from (prev);
27448 if (x)
27449 {
27450 /* Try to get the call symbol, if any. */
27451 if (MEM_P (XEXP (x, 0)))
27452 x = XEXP (x, 0);
27453 /* First, look for a memory access to a symbol_ref. */
27454 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27455 && SYMBOL_REF_DECL (XEXP (x, 0))
27456 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27457 ca_loc->symbol_ref = XEXP (x, 0);
27458 /* Otherwise, look at a compile-time known user-level function
27459 declaration. */
27460 else if (MEM_P (x)
27461 && MEM_EXPR (x)
27462 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27463 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27464 }
27465
27466 ca_loc->block = insn_scope (prev);
27467 if (call_arg_locations)
27468 call_arg_loc_last->next = ca_loc;
27469 else
27470 call_arg_locations = ca_loc;
27471 call_arg_loc_last = ca_loc;
27472 }
27473 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27474 {
27475 newloc->label = last_label;
27476 newloc->view = view;
27477 }
27478 else
27479 {
27480 if (!last_postcall_label)
27481 {
27482 sprintf (loclabel, "%s-1", last_label);
27483 last_postcall_label = ggc_strdup (loclabel);
27484 }
27485 newloc->label = last_postcall_label;
27486 /* ??? This view is at last_label, not last_label-1, but we
27487 could only assume view at last_label-1 is zero if we could
27488 assume calls always have length greater than one. This is
27489 probably true in general, though there might be a rare
27490 exception to this rule, e.g. if a call insn is optimized out
27491 by target magic. Then, even the -1 in the label will be
27492 wrong, which might invalidate the range. Anyway, using view,
27493 though technically possibly incorrect, will work as far as
27494 ranges go: since L-1 is in the middle of the call insn,
27495 (L-1).0 and (L-1).V shouldn't make any difference, and having
27496 the loclist entry refer to the .loc entry might be useful, so
27497 leave it like this. */
27498 newloc->view = view;
27499 }
27500
27501 if (var_loc_p && flag_debug_asm)
27502 {
27503 const char *name, *sep, *patstr;
27504 if (decl && DECL_NAME (decl))
27505 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27506 else
27507 name = "";
27508 if (NOTE_VAR_LOCATION_LOC (loc_note))
27509 {
27510 sep = " => ";
27511 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27512 }
27513 else
27514 {
27515 sep = " ";
27516 patstr = "RESET";
27517 }
27518 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27519 name, sep, patstr);
27520 }
27521
27522 last_var_location_insn = next_real;
27523 last_in_cold_section_p = in_cold_section_p;
27524 }
27525
27526 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27527 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27528 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27529 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27530 BLOCK_FRAGMENT_ORIGIN links. */
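/* An illustrative sketch (not a block tree taken from any particular
   testcase): given

     OUTER
       +-- A
       |    +-- B
       +-- C

   block_within_block_p (B, OUTER, false) only walks B -> A -> OUTER via
   BLOCK_SUPERCONTEXT.  With BOTHWAYS it additionally checks that B is on
   A's BLOCK_SUBBLOCKS/BLOCK_CHAIN list and A on OUTER's, following
   BLOCK_FRAGMENT_ORIGIN for fragmented blocks along the way.  */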
27531 static bool
27532 block_within_block_p (tree block, tree outer, bool bothways)
27533 {
27534 if (block == outer)
27535 return true;
27536
27537 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27538 for (tree context = BLOCK_SUPERCONTEXT (block);
27539 context != outer;
27540 context = BLOCK_SUPERCONTEXT (context))
27541 if (!context || TREE_CODE (context) != BLOCK)
27542 return false;
27543
27544 if (!bothways)
27545 return true;
27546
27547 /* Now check that each block is actually referenced by its
27548 parent. */
27549 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27550 context = BLOCK_SUPERCONTEXT (context))
27551 {
27552 if (BLOCK_FRAGMENT_ORIGIN (context))
27553 {
27554 gcc_assert (!BLOCK_SUBBLOCKS (context));
27555 context = BLOCK_FRAGMENT_ORIGIN (context);
27556 }
27557 for (tree sub = BLOCK_SUBBLOCKS (context);
27558 sub != block;
27559 sub = BLOCK_CHAIN (sub))
27560 if (!sub)
27561 return false;
27562 if (context == outer)
27563 return true;
27564 else
27565 block = context;
27566 }
27567 }
27568
27569 /* Called during final while assembling the marker of the entry point
27570 for an inlined function. */
27571
27572 static void
27573 dwarf2out_inline_entry (tree block)
27574 {
27575 gcc_assert (debug_inline_points);
27576
27577 /* If we can't represent it, don't bother. */
27578 if (!(dwarf_version >= 3 || !dwarf_strict))
27579 return;
27580
27581 gcc_assert (DECL_P (block_ultimate_origin (block)));
27582
27583 /* Sanity check the block tree. This would catch a case in which
27584 BLOCK got removed from the tree reachable from the outermost
27585 lexical block, but got retained in markers. It would still link
27586 back to its parents, but some ancestor would be missing a link
27587 down the path to the sub BLOCK. If the block got removed, its
27588 BLOCK_NUMBER will not be a usable value. */
27589 if (flag_checking)
27590 gcc_assert (block_within_block_p (block,
27591 DECL_INITIAL (current_function_decl),
27592 true));
27593
27594 gcc_assert (inlined_function_outer_scope_p (block));
27595 gcc_assert (!lookup_block_die (block));
27596
27597 if (BLOCK_FRAGMENT_ORIGIN (block))
27598 block = BLOCK_FRAGMENT_ORIGIN (block);
27599 /* Can the entry point ever not be at the beginning of an
27600 unfragmented lexical block? */
27601 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27602 || (cur_line_info_table
27603 && !ZERO_VIEW_P (cur_line_info_table->view))))
27604 return;
27605
27606 if (!inline_entry_data_table)
27607 inline_entry_data_table
27608 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27609
27610
27611 inline_entry_data **iedp
27612 = inline_entry_data_table->find_slot_with_hash (block,
27613 htab_hash_pointer (block),
27614 INSERT);
27615 if (*iedp)
27616 /* ??? Ideally, we'd record all entry points for the same inlined
27617 function (some may have been duplicated by e.g. unrolling), but
27618 we have no way to represent that ATM. */
27619 return;
27620
27621 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27622 ied->block = block;
27623 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27624 ied->label_num = BLOCK_NUMBER (block);
27625 if (cur_line_info_table)
27626 ied->view = cur_line_info_table->view;
27627
27628 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_INLINE_ENTRY_LABEL,
27629 BLOCK_NUMBER (block));
27630 }
27631
27632 /* Called from finalize_size_functions for size functions so that their body
27633 can be encoded in the debug info to describe the layout of variable-length
27634 structures. */
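/* For instance (a hedged illustration, not tied to a specific front end):
   an Ada or Fortran record whose size depends on a discriminant or bound
   gets a compiler-generated size function; function_to_dwarf_procedure
   encodes its body as a DW_TAG_dwarf_procedure that debug info consumers
   can evaluate at debug time to compute the object's size.  */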
27635
27636 static void
27637 dwarf2out_size_function (tree decl)
27638 {
27639 set_early_dwarf s;
27640 function_to_dwarf_procedure (decl);
27641 }
27642
27643 /* Note in one location list that the text section has changed. */
27644
27645 int
27646 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27647 {
27648 var_loc_list *list = *slot;
27649 if (list->first)
27650 list->last_before_switch
27651 = list->last->next ? list->last->next : list->last;
27652 return 1;
27653 }
27654
27655 /* Note in all location lists that the text section has changed. */
27656
27657 static void
27658 var_location_switch_text_section (void)
27659 {
27660 if (decl_loc_table == NULL)
27661 return;
27662
27663 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27664 }
27665
27666 /* Create a new line number table. */
27667
27668 static dw_line_info_table *
27669 new_line_info_table (void)
27670 {
27671 dw_line_info_table *table;
27672
27673 table = ggc_cleared_alloc<dw_line_info_table> ();
27674 table->file_num = 1;
27675 table->line_num = 1;
27676 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27677 FORCE_RESET_NEXT_VIEW (table->view);
27678 table->symviews_since_reset = 0;
27679
27680 return table;
27681 }
27682
27683 /* Look up the "current" table into which we emit line info, so
27684 that we don't have to do it for every source line. */
27685
27686 static void
27687 set_cur_line_info_table (section *sec)
27688 {
27689 dw_line_info_table *table;
27690
27691 if (sec == text_section)
27692 table = text_section_line_info;
27693 else if (sec == cold_text_section)
27694 {
27695 table = cold_text_section_line_info;
27696 if (!table)
27697 {
27698 cold_text_section_line_info = table = new_line_info_table ();
27699 table->end_label = cold_end_label;
27700 }
27701 }
27702 else
27703 {
27704 const char *end_label;
27705
27706 if (crtl->has_bb_partition)
27707 {
27708 if (in_cold_section_p)
27709 end_label = crtl->subsections.cold_section_end_label;
27710 else
27711 end_label = crtl->subsections.hot_section_end_label;
27712 }
27713 else
27714 {
27715 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27716 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27717 current_function_funcdef_no);
27718 end_label = ggc_strdup (label);
27719 }
27720
27721 table = new_line_info_table ();
27722 table->end_label = end_label;
27723
27724 vec_safe_push (separate_line_info, table);
27725 }
27726
27727 if (output_asm_line_debug_info ())
27728 table->is_stmt = (cur_line_info_table
27729 ? cur_line_info_table->is_stmt
27730 : DWARF_LINE_DEFAULT_IS_STMT_START);
27731 cur_line_info_table = table;
27732 }
27733
27734
27735 /* We need to reset the locations at the beginning of each
27736 function. We can't do this in the end_function hook, because the
27737 declarations that use the locations won't have been output when
27738 that hook is called. Also compute have_multiple_function_sections here. */
27739
27740 static void
27741 dwarf2out_begin_function (tree fun)
27742 {
27743 section *sec = function_section (fun);
27744
27745 if (sec != text_section)
27746 have_multiple_function_sections = true;
27747
27748 if (crtl->has_bb_partition && !cold_text_section)
27749 {
27750 gcc_assert (current_function_decl == fun);
27751 cold_text_section = unlikely_text_section ();
27752 switch_to_section (cold_text_section);
27753 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27754 switch_to_section (sec);
27755 }
27756
27757 dwarf2out_note_section_used ();
27758 call_site_count = 0;
27759 tail_call_site_count = 0;
27760
27761 set_cur_line_info_table (sec);
27762 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27763 }
27764
27765 /* Helper function of dwarf2out_end_function, called only after emitting
27766 the very first function into assembly. Check if some .debug_loc range
27767 might end with a .LVL* label that could be equal to .Ltext0.
27768 In that case we must force using absolute addresses in .debug_loc ranges,
27769 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27770 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27771 list terminator.
27772 Set have_multiple_function_sections to true in that case and
27773 terminate htab traversal. */
27774
27775 int
27776 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27777 {
27778 var_loc_list *entry = *slot;
27779 struct var_loc_node *node;
27780
27781 node = entry->first;
27782 if (node && node->next && node->next->label)
27783 {
27784 unsigned int i;
27785 const char *label = node->next->label;
27786 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27787
27788 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27789 {
27790 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27791 if (strcmp (label, loclabel) == 0)
27792 {
27793 have_multiple_function_sections = true;
27794 return 0;
27795 }
27796 }
27797 }
27798 return 1;
27799 }
27800
27801 /* Hook called after emitting a function into assembly.
27802 This does something only for the very first function emitted. */
27803
27804 static void
27805 dwarf2out_end_function (unsigned int)
27806 {
27807 if (in_first_function_p
27808 && !have_multiple_function_sections
27809 && first_loclabel_num_not_at_text_label
27810 && decl_loc_table)
27811 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27812 in_first_function_p = false;
27813 maybe_at_text_label_p = false;
27814 }
27815
27816 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27817 front-ends register a translation unit even before dwarf2out_init is
27818 called. */
27819 static tree main_translation_unit = NULL_TREE;
27820
27821 /* Hook called by front-ends after they built their main translation unit.
27822 Associate comp_unit_die to UNIT. */
27823
27824 static void
27825 dwarf2out_register_main_translation_unit (tree unit)
27826 {
27827 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27828 && main_translation_unit == NULL_TREE);
27829 main_translation_unit = unit;
27830 /* If dwarf2out_init has not been called yet, it will perform the association
27831 itself looking at main_translation_unit. */
27832 if (decl_die_table != NULL)
27833 equate_decl_number_to_die (unit, comp_unit_die ());
27834 }
27835
27836 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27837
27838 static void
27839 push_dw_line_info_entry (dw_line_info_table *table,
27840 enum dw_line_info_opcode opcode, unsigned int val)
27841 {
27842 dw_line_info_entry e;
27843 e.opcode = opcode;
27844 e.val = val;
27845 vec_safe_push (table->entries, e);
27846 }
27847
27848 /* Output a label to mark the beginning of a source code line entry
27849 and record information relating to this source line, in
27850 'line_info_table' for later output of the .debug_line section. */
27851 /* ??? The discriminator parameter ought to be unsigned. */
27852
27853 static void
27854 dwarf2out_source_line (unsigned int line, unsigned int column,
27855 const char *filename,
27856 int discriminator, bool is_stmt)
27857 {
27858 unsigned int file_num;
27859 dw_line_info_table *table;
27860 static var_loc_view lvugid;
27861
27862 if (debug_info_level < DINFO_LEVEL_TERSE)
27863 return;
27864
27865 table = cur_line_info_table;
27866
27867 if (line == 0)
27868 {
27869 if (debug_variable_location_views
27870 && output_asm_line_debug_info ()
27871 && table && !RESETTING_VIEW_P (table->view))
27872 {
27873 /* If we're using the assembler to compute view numbers, we
27874 can't issue a .loc directive for line zero, so we can't
27875 get a view number at this point. We might attempt to
27876 compute it from the previous view, or equate it to a
27877 subsequent view (though it might not be there!), but
27878 since we're omitting the line number entry, we might as
27879 well omit the view number as well. That means pretending
27880 it's a view number zero, which might very well turn out
27881 to be correct. ??? Extend the assembler so that the
27882 compiler could emit e.g. ".locview .LVU#", to output a
27883 view without changing line number information. We'd then
27884 have to count it in symviews_since_reset; when it's omitted,
27885 it doesn't count. */
27886 if (!zero_view_p)
27887 zero_view_p = BITMAP_GGC_ALLOC ();
27888 bitmap_set_bit (zero_view_p, table->view);
27889 if (flag_debug_asm)
27890 {
27891 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27892 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27893 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27894 ASM_COMMENT_START);
27895 assemble_name (asm_out_file, label);
27896 putc ('\n', asm_out_file);
27897 }
27898 table->view = ++lvugid;
27899 }
27900 return;
27901 }
27902
27903 /* The discriminator column was added in dwarf4. Simplify the below
27904 by simply removing it if we're not supposed to output it. */
27905 if (dwarf_version < 4 && dwarf_strict)
27906 discriminator = 0;
27907
27908 if (!debug_column_info)
27909 column = 0;
27910
27911 file_num = maybe_emit_file (lookup_filename (filename));
27912
27913 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27914 the debugger has used the second (possibly duplicate) line number
27915 at the beginning of the function to mark the end of the prologue.
27916 We could eliminate any other duplicates within the function. For
27917 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27918 that second line number entry. */
27919 /* Recall that this end-of-prologue indication is *not* the same thing
27920 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27921 to which the hook corresponds, follows the last insn that was
27922 emitted by gen_prologue. What we need is to precede the first insn
27923 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27924 insn that corresponds to something the user wrote. These may be
27925 very different locations once scheduling is enabled. */
27926
27927 if (0 && file_num == table->file_num
27928 && line == table->line_num
27929 && column == table->column_num
27930 && discriminator == table->discrim_num
27931 && is_stmt == table->is_stmt)
27932 return;
27933
27934 switch_to_section (current_function_section ());
27935
27936 /* If requested, emit something human-readable. */
27937 if (flag_debug_asm)
27938 {
27939 if (debug_column_info)
27940 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27941 filename, line, column);
27942 else
27943 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27944 filename, line);
27945 }
27946
27947 if (output_asm_line_debug_info ())
27948 {
27949 /* Emit the .loc directive understood by GNU as. */
27950 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
27951 file_num, line, is_stmt, discriminator */
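/* For example, the code below might produce something like
   "\t.loc 1 42 7 is_stmt 0 discriminator 3 view .LVU5"
   (hypothetical operand values; the is_stmt, discriminator and view
   operands are only appended under the conditions checked below). */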
27952 fputs ("\t.loc ", asm_out_file);
27953 fprint_ul (asm_out_file, file_num);
27954 putc (' ', asm_out_file);
27955 fprint_ul (asm_out_file, line);
27956 putc (' ', asm_out_file);
27957 fprint_ul (asm_out_file, column);
27958
27959 if (is_stmt != table->is_stmt)
27960 {
27961 #if HAVE_GAS_LOC_STMT
27962 fputs (" is_stmt ", asm_out_file);
27963 putc (is_stmt ? '1' : '0', asm_out_file);
27964 #endif
27965 }
27966 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
27967 {
27968 gcc_assert (discriminator > 0);
27969 fputs (" discriminator ", asm_out_file);
27970 fprint_ul (asm_out_file, (unsigned long) discriminator);
27971 }
27972 if (debug_variable_location_views)
27973 {
27974 if (!RESETTING_VIEW_P (table->view))
27975 {
27976 table->symviews_since_reset++;
27977 if (table->symviews_since_reset > symview_upper_bound)
27978 symview_upper_bound = table->symviews_since_reset;
27979 /* When we're using the assembler to compute view
27980 numbers, we output symbolic labels after "view" in
27981 .loc directives, and the assembler will set them for
27982 us, so that we can refer to the view numbers in
27983 location lists. The only exceptions are when we know
27984 a view will be zero: "-0" is a forced reset, used
27985 e.g. in the beginning of functions, whereas "0" tells
27986 the assembler to check that there was a PC change
27987 since the previous view, in a way that implicitly
27988 resets the next view. */
27989 fputs (" view ", asm_out_file);
27990 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27991 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27992 assemble_name (asm_out_file, label);
27993 table->view = ++lvugid;
27994 }
27995 else
27996 {
27997 table->symviews_since_reset = 0;
27998 if (FORCE_RESETTING_VIEW_P (table->view))
27999 fputs (" view -0", asm_out_file);
28000 else
28001 fputs (" view 0", asm_out_file);
28002 /* Mark the present view as a zero view. Earlier debug
28003 binds may have already added its id to loclists to be
28004 emitted later, so we can't reuse the id for something
28005 else. However, it's good to know whether a view is
28006 known to be zero, because then we may be able to
28007 optimize out locviews that are all zeros, so take
28008 note of it in zero_view_p. */
28009 if (!zero_view_p)
28010 zero_view_p = BITMAP_GGC_ALLOC ();
28011 bitmap_set_bit (zero_view_p, lvugid);
28012 table->view = ++lvugid;
28013 }
28014 }
28015 putc ('\n', asm_out_file);
28016 }
28017 else
28018 {
28019 unsigned int label_num = ++line_info_label_num;
28020
28021 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
28022
28023 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
28024 push_dw_line_info_entry (table, LI_adv_address, label_num);
28025 else
28026 push_dw_line_info_entry (table, LI_set_address, label_num);
28027 if (debug_variable_location_views)
28028 {
28029 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
28030 if (resetting)
28031 table->view = 0;
28032
28033 if (flag_debug_asm)
28034 fprintf (asm_out_file, "\t%s view %s%d\n",
28035 ASM_COMMENT_START,
28036 resetting ? "-" : "",
28037 table->view);
28038
28039 table->view++;
28040 }
28041 if (file_num != table->file_num)
28042 push_dw_line_info_entry (table, LI_set_file, file_num);
28043 if (discriminator != table->discrim_num)
28044 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
28045 if (is_stmt != table->is_stmt)
28046 push_dw_line_info_entry (table, LI_negate_stmt, 0);
28047 push_dw_line_info_entry (table, LI_set_line, line);
28048 if (debug_column_info)
28049 push_dw_line_info_entry (table, LI_set_column, column);
28050 }
28051
28052 table->file_num = file_num;
28053 table->line_num = line;
28054 table->column_num = column;
28055 table->discrim_num = discriminator;
28056 table->is_stmt = is_stmt;
28057 table->in_use = true;
28058 }
28059
28060 /* Record the beginning of a new source file. */
28061
28062 static void
28063 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
28064 {
28065 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28066 {
28067 macinfo_entry e;
28068 e.code = DW_MACINFO_start_file;
28069 e.lineno = lineno;
28070 e.info = ggc_strdup (filename);
28071 vec_safe_push (macinfo_table, e);
28072 }
28073 }
28074
28075 /* Record the end of a source file. */
28076
28077 static void
28078 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
28079 {
28080 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28081 {
28082 macinfo_entry e;
28083 e.code = DW_MACINFO_end_file;
28084 e.lineno = lineno;
28085 e.info = NULL;
28086 vec_safe_push (macinfo_table, e);
28087 }
28088 }
28089
28090 /* Called from debug_define in toplev.c. The `buffer' parameter contains
28091 the tail part of the directive line, i.e. the part which is past the
28092 initial whitespace, #, whitespace, directive-name, whitespace part. */
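/* For example (hypothetical input), for a directive like
   "#define FOO(x) ((x) + 1)" the BUFFER argument would be
   "FOO(x) ((x) + 1)", which is recorded verbatim as the
   DW_MACINFO_define string. */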
28093
28094 static void
28095 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
28096 const char *buffer ATTRIBUTE_UNUSED)
28097 {
28098 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28099 {
28100 macinfo_entry e;
28101 /* Insert a dummy first entry to be able to optimize the whole
28102 predefined macro block using DW_MACRO_import. */
28103 if (macinfo_table->is_empty () && lineno <= 1)
28104 {
28105 e.code = 0;
28106 e.lineno = 0;
28107 e.info = NULL;
28108 vec_safe_push (macinfo_table, e);
28109 }
28110 e.code = DW_MACINFO_define;
28111 e.lineno = lineno;
28112 e.info = ggc_strdup (buffer);
28113 vec_safe_push (macinfo_table, e);
28114 }
28115 }
28116
28117 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
28118 the tail part of the directive line, i.e. the part which is past the
28119 initial whitespace, #, whitespace, directive-name, whitespace part. */
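/* For example (hypothetical input), "#undef FOO" arrives here with
   BUFFER simply being "FOO". */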
28120
28121 static void
28122 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
28123 const char *buffer ATTRIBUTE_UNUSED)
28124 {
28125 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28126 {
28127 macinfo_entry e;
28128 /* Insert a dummy first entry to be able to optimize the whole
28129 predefined macro block using DW_MACRO_import. */
28130 if (macinfo_table->is_empty () && lineno <= 1)
28131 {
28132 e.code = 0;
28133 e.lineno = 0;
28134 e.info = NULL;
28135 vec_safe_push (macinfo_table, e);
28136 }
28137 e.code = DW_MACINFO_undef;
28138 e.lineno = lineno;
28139 e.info = ggc_strdup (buffer);
28140 vec_safe_push (macinfo_table, e);
28141 }
28142 }
28143
28144 /* Helpers for the hash table of macinfo entries. */
28145
28146 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
28147 {
28148 static inline hashval_t hash (const macinfo_entry *);
28149 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
28150 };
28151
28152 inline hashval_t
28153 macinfo_entry_hasher::hash (const macinfo_entry *entry)
28154 {
28155 return htab_hash_string (entry->info);
28156 }
28157
28158 inline bool
28159 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
28160 const macinfo_entry *entry2)
28161 {
28162 return !strcmp (entry1->info, entry2->info);
28163 }
28164
28165 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28166
28167 /* Output a single .debug_macinfo entry. */
28168
28169 static void
28170 output_macinfo_op (macinfo_entry *ref)
28171 {
28172 int file_num;
28173 size_t len;
28174 struct indirect_string_node *node;
28175 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28176 struct dwarf_file_data *fd;
28177
28178 switch (ref->code)
28179 {
28180 case DW_MACINFO_start_file:
28181 fd = lookup_filename (ref->info);
28182 file_num = maybe_emit_file (fd);
28183 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28184 dw2_asm_output_data_uleb128 (ref->lineno,
28185 "Included from line number %lu",
28186 (unsigned long) ref->lineno);
28187 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28188 break;
28189 case DW_MACINFO_end_file:
28190 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28191 break;
28192 case DW_MACINFO_define:
28193 case DW_MACINFO_undef:
28194 len = strlen (ref->info) + 1;
28195 if (!dwarf_strict
28196 && len > DWARF_OFFSET_SIZE
28197 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28198 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28199 {
28200 ref->code = ref->code == DW_MACINFO_define
28201 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28202 output_macinfo_op (ref);
28203 return;
28204 }
28205 dw2_asm_output_data (1, ref->code,
28206 ref->code == DW_MACINFO_define
28207 ? "Define macro" : "Undefine macro");
28208 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28209 (unsigned long) ref->lineno);
28210 dw2_asm_output_nstring (ref->info, -1, "The macro");
28211 break;
28212 case DW_MACRO_define_strp:
28213 case DW_MACRO_undef_strp:
28214 /* NB: dwarf2out_finish performs:
28215 1. save_macinfo_strings
28216 2. hash table traverse of index_string
28217 3. output_macinfo -> output_macinfo_op
28218 4. output_indirect_strings
28219 -> hash table traverse of output_index_string
28220
28221 When output_macinfo_op is called, all index strings have already been
28222 added to the hash table by save_macinfo_strings, so we must not pass
28223 INSERT to find_slot_with_hash: even when no insertion is needed, INSERT
28224 may expand the hash table and change its traversal order between
28225 index_string and output_index_string. */
28226 node = find_AT_string (ref->info, NO_INSERT);
28227 gcc_assert (node
28228 && (node->form == DW_FORM_strp
28229 || node->form == dwarf_FORM (DW_FORM_strx)));
28230 dw2_asm_output_data (1, ref->code,
28231 ref->code == DW_MACRO_define_strp
28232 ? "Define macro strp"
28233 : "Undefine macro strp");
28234 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28235 (unsigned long) ref->lineno);
28236 if (node->form == DW_FORM_strp)
28237 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28238 debug_str_section, "The macro: \"%s\"",
28239 ref->info);
28240 else
28241 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28242 ref->info);
28243 break;
28244 case DW_MACRO_import:
28245 dw2_asm_output_data (1, ref->code, "Import");
28246 ASM_GENERATE_INTERNAL_LABEL (label,
28247 DEBUG_MACRO_SECTION_LABEL,
28248 ref->lineno + macinfo_label_base);
28249 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28250 break;
28251 default:
28252 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28253 ASM_COMMENT_START, (unsigned long) ref->code);
28254 break;
28255 }
28256 }
28257
28258 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28259 other compilation units' .debug_macinfo sections. IDX is the index of
28260 the first define/undef op; return the number of ops that should be
28261 emitted in a comdat .debug_macinfo section and emit
28262 a DW_MACRO_import entry referencing it.
28263 If the define/undef entries should be emitted normally, return 0. */
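/* A rough sketch of the transformation (illustrative values only):

     before:  [idx-1] empty   [idx] define __STDC__ 1   ...   [idx+count-1] define ...
     after:   [idx-1] DW_MACRO_import -> comdat group "wm4.<lineno>.<md5>"
              [idx .. idx+count-1] left in place, later re-emitted inside
              that comdat .debug_macinfo section

   so identical runs of predefined or header macros from different
   compilation units can be deduplicated by the linker via comdat groups.  */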
28264
28265 static unsigned
28266 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28267 macinfo_hash_type **macinfo_htab)
28268 {
28269 macinfo_entry *first, *second, *cur, *inc;
28270 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28271 unsigned char checksum[16];
28272 struct md5_ctx ctx;
28273 char *grp_name, *tail;
28274 const char *base;
28275 unsigned int i, count, encoded_filename_len, linebuf_len;
28276 macinfo_entry **slot;
28277
28278 first = &(*macinfo_table)[idx];
28279 second = &(*macinfo_table)[idx + 1];
28280
28281 /* Optimize only if there are at least two consecutive define/undef ops,
28282 and either all of them are before first DW_MACINFO_start_file
28283 with lineno {0,1} (i.e. predefined macro block), or all of them are
28284 in some included header file. */
28285 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28286 return 0;
28287 if (vec_safe_is_empty (files))
28288 {
28289 if (first->lineno > 1 || second->lineno > 1)
28290 return 0;
28291 }
28292 else if (first->lineno == 0)
28293 return 0;
28294
28295 /* Find the last define/undef entry that can be grouped together
28296 with first and at the same time compute md5 checksum of their
28297 codes, linenumbers and strings. */
28298 md5_init_ctx (&ctx);
28299 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28300 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28301 break;
28302 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28303 break;
28304 else
28305 {
28306 unsigned char code = cur->code;
28307 md5_process_bytes (&code, 1, &ctx);
28308 checksum_uleb128 (cur->lineno, &ctx);
28309 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28310 }
28311 md5_finish_ctx (&ctx, checksum);
28312 count = i - idx;
28313
28314 /* From the containing include filename (if any) pick up just
28315 usable characters from its basename. */
28316 if (vec_safe_is_empty (files))
28317 base = "";
28318 else
28319 base = lbasename (files->last ().info);
28320 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28321 if (ISIDNUM (base[i]) || base[i] == '.')
28322 encoded_filename_len++;
28323 /* Count . at the end. */
28324 if (encoded_filename_len)
28325 encoded_filename_len++;
28326
28327 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28328 linebuf_len = strlen (linebuf);
28329
28330 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
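/* E.g. a hypothetical group name could look like
   "wm4.stdio.h.31.5d41402abc4b2a76b9719d911017c592"
   (4-byte offsets, basename "stdio.h", first lineno 31, md5 in hex). */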
28331 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28332 + 16 * 2 + 1);
28333 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28334 tail = grp_name + 4;
28335 if (encoded_filename_len)
28336 {
28337 for (i = 0; base[i]; i++)
28338 if (ISIDNUM (base[i]) || base[i] == '.')
28339 *tail++ = base[i];
28340 *tail++ = '.';
28341 }
28342 memcpy (tail, linebuf, linebuf_len);
28343 tail += linebuf_len;
28344 *tail++ = '.';
28345 for (i = 0; i < 16; i++)
28346 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28347
28348 /* Construct a macinfo_entry for DW_MACRO_import
28349 in the empty vector entry before the first define/undef. */
28350 inc = &(*macinfo_table)[idx - 1];
28351 inc->code = DW_MACRO_import;
28352 inc->lineno = 0;
28353 inc->info = ggc_strdup (grp_name);
28354 if (!*macinfo_htab)
28355 *macinfo_htab = new macinfo_hash_type (10);
28356 /* Avoid emitting duplicates. */
28357 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28358 if (*slot != NULL)
28359 {
28360 inc->code = 0;
28361 inc->info = NULL;
28362 /* If such an entry has been used before, just emit
28363 a DW_MACRO_import op. */
28364 inc = *slot;
28365 output_macinfo_op (inc);
28366 /* And clear all macinfo_entry in the range to avoid emitting them
28367 in the second pass. */
28368 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28369 {
28370 cur->code = 0;
28371 cur->info = NULL;
28372 }
28373 }
28374 else
28375 {
28376 *slot = inc;
28377 inc->lineno = (*macinfo_htab)->elements ();
28378 output_macinfo_op (inc);
28379 }
28380 return count;
28381 }
28382
28383 /* Save any strings needed by the macinfo table in the debug str
28384 table. All strings must be collected into the table by the time
28385 index_string is called. */
28386
28387 static void
28388 save_macinfo_strings (void)
28389 {
28390 unsigned len;
28391 unsigned i;
28392 macinfo_entry *ref;
28393
28394 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28395 {
28396 switch (ref->code)
28397 {
28398 /* Match the logic in output_macinfo_op to decide on
28399 indirect strings. */
28400 case DW_MACINFO_define:
28401 case DW_MACINFO_undef:
28402 len = strlen (ref->info) + 1;
28403 if (!dwarf_strict
28404 && len > DWARF_OFFSET_SIZE
28405 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28406 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28407 set_indirect_string (find_AT_string (ref->info));
28408 break;
28409 case DW_MACINFO_start_file:
28410 /* -gsplit-dwarf -g3 will also output filename as indirect
28411 string. */
28412 if (!dwarf_split_debug_info)
28413 break;
28414 /* Fall through. */
28415 case DW_MACRO_define_strp:
28416 case DW_MACRO_undef_strp:
28417 set_indirect_string (find_AT_string (ref->info));
28418 break;
28419 default:
28420 break;
28421 }
28422 }
28423 }
28424
28425 /* Output macinfo section(s). */
28426
28427 static void
28428 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28429 {
28430 unsigned i;
28431 unsigned long length = vec_safe_length (macinfo_table);
28432 macinfo_entry *ref;
28433 vec<macinfo_entry, va_gc> *files = NULL;
28434 macinfo_hash_type *macinfo_htab = NULL;
28435 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28436
28437 if (! length)
28438 return;
28439
28440 /* output_macinfo* uses these interchangeably. */
28441 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28442 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28443 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28444 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28445
28446 /* AIX Assembler inserts the length, so adjust the reference to match the
28447 offset expected by debuggers. */
28448 strcpy (dl_section_ref, debug_line_label);
28449 if (XCOFF_DEBUGGING_INFO)
28450 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28451
28452 /* For .debug_macro emit the section header. */
28453 if (!dwarf_strict || dwarf_version >= 5)
28454 {
28455 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28456 "DWARF macro version number");
28457 if (DWARF_OFFSET_SIZE == 8)
28458 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28459 else
28460 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28461 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28462 debug_line_section, NULL);
28463 }
28464
28465 /* The first loop emits the primary .debug_macinfo section,
28466 clearing each macinfo_entry after its op has been emitted.
28467 If a longer range of define/undef ops can be optimized using
28468 DW_MACRO_import, the DW_MACRO_import op is emitted and kept in the
28469 vector entry before the first define/undef in the range, while the
28470 define/undef ops themselves are not emitted here but kept for the second loop. */
28471 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28472 {
28473 switch (ref->code)
28474 {
28475 case DW_MACINFO_start_file:
28476 vec_safe_push (files, *ref);
28477 break;
28478 case DW_MACINFO_end_file:
28479 if (!vec_safe_is_empty (files))
28480 files->pop ();
28481 break;
28482 case DW_MACINFO_define:
28483 case DW_MACINFO_undef:
28484 if ((!dwarf_strict || dwarf_version >= 5)
28485 && HAVE_COMDAT_GROUP
28486 && vec_safe_length (files) != 1
28487 && i > 0
28488 && i + 1 < length
28489 && (*macinfo_table)[i - 1].code == 0)
28490 {
28491 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28492 if (count)
28493 {
28494 i += count - 1;
28495 continue;
28496 }
28497 }
28498 break;
28499 case 0:
28500 /* A dummy entry may be inserted at the beginning to be able
28501 to optimize the whole block of predefined macros. */
28502 if (i == 0)
28503 continue;
28504 default:
28505 break;
28506 }
28507 output_macinfo_op (ref);
28508 ref->info = NULL;
28509 ref->code = 0;
28510 }
28511
28512 if (!macinfo_htab)
28513 return;
28514
28515 /* Save the number of transparent includes so we can adjust the
28516 label number for the fat LTO object DWARF. */
28517 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28518
28519 delete macinfo_htab;
28520 macinfo_htab = NULL;
28521
28522 /* If any DW_MACRO_import entries were used, terminate the current chain
28523 at each of them, switch to a new comdat .debug_macinfo section, and
28524 emit the corresponding define/undef entries within it. */
28525 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28526 switch (ref->code)
28527 {
28528 case 0:
28529 continue;
28530 case DW_MACRO_import:
28531 {
28532 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28533 tree comdat_key = get_identifier (ref->info);
28534 /* Terminate the previous .debug_macinfo section. */
28535 dw2_asm_output_data (1, 0, "End compilation unit");
28536 targetm.asm_out.named_section (debug_macinfo_section_name,
28537 SECTION_DEBUG
28538 | SECTION_LINKONCE
28539 | (early_lto_debug
28540 ? SECTION_EXCLUDE : 0),
28541 comdat_key);
28542 ASM_GENERATE_INTERNAL_LABEL (label,
28543 DEBUG_MACRO_SECTION_LABEL,
28544 ref->lineno + macinfo_label_base);
28545 ASM_OUTPUT_LABEL (asm_out_file, label);
28546 ref->code = 0;
28547 ref->info = NULL;
28548 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28549 "DWARF macro version number");
28550 if (DWARF_OFFSET_SIZE == 8)
28551 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28552 else
28553 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28554 }
28555 break;
28556 case DW_MACINFO_define:
28557 case DW_MACINFO_undef:
28558 output_macinfo_op (ref);
28559 ref->code = 0;
28560 ref->info = NULL;
28561 break;
28562 default:
28563 gcc_unreachable ();
28564 }
28565
28566 macinfo_label_base += macinfo_label_base_adj;
28567 }
28568
28569 /* Initialize the various sections and labels for dwarf output; if
28570 EARLY_LTO_DEBUG, use the early LTO debug variants. Returns the
28571 generation (zero-based count of how many times the function has been called). */
28572
28573 static unsigned
28574 init_sections_and_labels (bool early_lto_debug)
28575 {
28576 /* As we may get called multiple times, keep a generation count for
28577 labels. */
28578 static unsigned generation = 0;
28579
28580 if (early_lto_debug)
28581 {
28582 if (!dwarf_split_debug_info)
28583 {
28584 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28585 SECTION_DEBUG | SECTION_EXCLUDE,
28586 NULL);
28587 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28588 SECTION_DEBUG | SECTION_EXCLUDE,
28589 NULL);
28590 debug_macinfo_section_name
28591 = ((dwarf_strict && dwarf_version < 5)
28592 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28593 debug_macinfo_section = get_section (debug_macinfo_section_name,
28594 SECTION_DEBUG
28595 | SECTION_EXCLUDE, NULL);
28596 }
28597 else
28598 {
28599 /* ??? Which of the following do we need early? */
28600 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28601 SECTION_DEBUG | SECTION_EXCLUDE,
28602 NULL);
28603 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28604 SECTION_DEBUG | SECTION_EXCLUDE,
28605 NULL);
28606 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28607 SECTION_DEBUG
28608 | SECTION_EXCLUDE, NULL);
28609 debug_skeleton_abbrev_section
28610 = get_section (DEBUG_LTO_ABBREV_SECTION,
28611 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28612 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28613 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28614 generation);
28615
28616 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28617 stay in the main .o, but the skeleton_line goes into the split
28618 off dwo. */
28619 debug_skeleton_line_section
28620 = get_section (DEBUG_LTO_LINE_SECTION,
28621 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28622 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28623 DEBUG_SKELETON_LINE_SECTION_LABEL,
28624 generation);
28625 debug_str_offsets_section
28626 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28627 SECTION_DEBUG | SECTION_EXCLUDE,
28628 NULL);
28629 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28630 DEBUG_SKELETON_INFO_SECTION_LABEL,
28631 generation);
28632 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28633 DEBUG_STR_DWO_SECTION_FLAGS,
28634 NULL);
28635 debug_macinfo_section_name
28636 = ((dwarf_strict && dwarf_version < 5)
28637 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28638 debug_macinfo_section = get_section (debug_macinfo_section_name,
28639 SECTION_DEBUG | SECTION_EXCLUDE,
28640 NULL);
28641 }
28642 /* For macro info and the file table we have to refer to a
28643 debug_line section. */
28644 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28645 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28646 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28647 DEBUG_LINE_SECTION_LABEL, generation);
28648
28649 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28650 DEBUG_STR_SECTION_FLAGS
28651 | SECTION_EXCLUDE, NULL);
28652 if (!dwarf_split_debug_info)
28653 debug_line_str_section
28654 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28655 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28656 }
28657 else
28658 {
28659 if (!dwarf_split_debug_info)
28660 {
28661 debug_info_section = get_section (DEBUG_INFO_SECTION,
28662 SECTION_DEBUG, NULL);
28663 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28664 SECTION_DEBUG, NULL);
28665 debug_loc_section = get_section (dwarf_version >= 5
28666 ? DEBUG_LOCLISTS_SECTION
28667 : DEBUG_LOC_SECTION,
28668 SECTION_DEBUG, NULL);
28669 debug_macinfo_section_name
28670 = ((dwarf_strict && dwarf_version < 5)
28671 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28672 debug_macinfo_section = get_section (debug_macinfo_section_name,
28673 SECTION_DEBUG, NULL);
28674 }
28675 else
28676 {
28677 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28678 SECTION_DEBUG | SECTION_EXCLUDE,
28679 NULL);
28680 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28681 SECTION_DEBUG | SECTION_EXCLUDE,
28682 NULL);
28683 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28684 SECTION_DEBUG, NULL);
28685 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28686 SECTION_DEBUG, NULL);
28687 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28688 SECTION_DEBUG, NULL);
28689 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28690 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28691 generation);
28692
28693 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28694 stay in the main .o, but the skeleton_line goes into the
28695 split off dwo. */
28696 debug_skeleton_line_section
28697 = get_section (DEBUG_DWO_LINE_SECTION,
28698 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28699 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28700 DEBUG_SKELETON_LINE_SECTION_LABEL,
28701 generation);
28702 debug_str_offsets_section
28703 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28704 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28705 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28706 DEBUG_SKELETON_INFO_SECTION_LABEL,
28707 generation);
28708 debug_loc_section = get_section (dwarf_version >= 5
28709 ? DEBUG_DWO_LOCLISTS_SECTION
28710 : DEBUG_DWO_LOC_SECTION,
28711 SECTION_DEBUG | SECTION_EXCLUDE,
28712 NULL);
28713 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28714 DEBUG_STR_DWO_SECTION_FLAGS,
28715 NULL);
28716 debug_macinfo_section_name
28717 = ((dwarf_strict && dwarf_version < 5)
28718 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28719 debug_macinfo_section = get_section (debug_macinfo_section_name,
28720 SECTION_DEBUG | SECTION_EXCLUDE,
28721 NULL);
28722 }
28723 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28724 SECTION_DEBUG, NULL);
28725 debug_line_section = get_section (DEBUG_LINE_SECTION,
28726 SECTION_DEBUG, NULL);
28727 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28728 SECTION_DEBUG, NULL);
28729 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28730 SECTION_DEBUG, NULL);
28731 debug_str_section = get_section (DEBUG_STR_SECTION,
28732 DEBUG_STR_SECTION_FLAGS, NULL);
28733 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28734 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28735 DEBUG_STR_SECTION_FLAGS, NULL);
28736
28737 debug_ranges_section = get_section (dwarf_version >= 5
28738 ? DEBUG_RNGLISTS_SECTION
28739 : DEBUG_RANGES_SECTION,
28740 SECTION_DEBUG, NULL);
28741 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28742 SECTION_DEBUG, NULL);
28743 }
28744
28745 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28746 DEBUG_ABBREV_SECTION_LABEL, generation);
28747 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28748 DEBUG_INFO_SECTION_LABEL, generation);
28749 info_section_emitted = false;
28750 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28751 DEBUG_LINE_SECTION_LABEL, generation);
28752 /* There are up to 4 unique ranges labels per generation.
28753 See also output_rnglists. */
28754 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28755 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28756 if (dwarf_version >= 5 && dwarf_split_debug_info)
28757 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28758 DEBUG_RANGES_SECTION_LABEL,
28759 1 + generation * 4);
28760 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28761 DEBUG_ADDR_SECTION_LABEL, generation);
28762 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28763 (dwarf_strict && dwarf_version < 5)
28764 ? DEBUG_MACINFO_SECTION_LABEL
28765 : DEBUG_MACRO_SECTION_LABEL, generation);
28766 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28767 generation);
28768
28769 ++generation;
28770 return generation - 1;
28771 }
28772
28773 /* Set up for Dwarf output at the start of compilation. */
28774
28775 static void
28776 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28777 {
28778 /* Allocate the file_table. */
28779 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28780
28781 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28782 /* Allocate the decl_die_table. */
28783 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28784
28785 /* Allocate the decl_loc_table. */
28786 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28787
28788 /* Allocate the cached_dw_loc_list_table. */
28789 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28790
28791 /* Allocate the initial hunk of the abbrev_die_table. */
28792 vec_alloc (abbrev_die_table, 256);
28793 /* Zero-th entry is allocated, but unused. */
28794 abbrev_die_table->quick_push (NULL);
28795
28796 /* Allocate the dwarf_proc_stack_usage_map. */
28797 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28798
28799 /* Allocate the pubtypes and pubnames vectors. */
28800 vec_alloc (pubname_table, 32);
28801 vec_alloc (pubtype_table, 32);
28802
28803 vec_alloc (incomplete_types, 64);
28804
28805 vec_alloc (used_rtx_array, 32);
28806
28807 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28808 vec_alloc (macinfo_table, 64);
28809 #endif
28810
28811 /* If front-ends already registered a main translation unit but we were not
28812 ready to perform the association, do this now. */
28813 if (main_translation_unit != NULL_TREE)
28814 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28815 }
28816
28817 /* Called before compile () starts outputting functions, variables
28818 and toplevel asms into assembly. */
28819
28820 static void
28821 dwarf2out_assembly_start (void)
28822 {
28823 if (text_section_line_info)
28824 return;
28825
28826 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28827 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28828 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28829 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28830 COLD_TEXT_SECTION_LABEL, 0);
28831 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28832
28833 switch_to_section (text_section);
28834 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28835 #endif
28836
28837 /* Make sure the line number table for .text always exists. */
28838 text_section_line_info = new_line_info_table ();
28839 text_section_line_info->end_label = text_end_label;
28840
28841 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28842 cur_line_info_table = text_section_line_info;
28843 #endif
28844
28845 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28846 && dwarf2out_do_cfi_asm ()
28847 && !dwarf2out_do_eh_frame ())
28848 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28849 }
28850
28851 /* A helper function for dwarf2out_finish called through
28852 htab_traverse. Assign a string its index. All strings must be
28853 collected into the table by the time index_string is called,
28854 because the indexing code relies on htab_traverse to traverse nodes
28855 in the same order for each run. */
28856
28857 int
28858 index_string (indirect_string_node **h, unsigned int *index)
28859 {
28860 indirect_string_node *node = *h;
28861
28862 find_string_form (node);
28863 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28864 {
28865 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28866 node->index = *index;
28867 *index += 1;
28868 }
28869 return 1;
28870 }
28871
28872 /* A helper function for output_indirect_strings called through
28873 htab_traverse. Output the offset to a string and update the
28874 current offset. */
28875
28876 int
28877 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28878 {
28879 indirect_string_node *node = *h;
28880
28881 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28882 {
28883 /* Assert that this node has been assigned an index. */
28884 gcc_assert (node->index != NO_INDEX_ASSIGNED
28885 && node->index != NOT_INDEXED);
28886 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28887 "indexed string 0x%x: %s", node->index, node->str);
28888 *offset += strlen (node->str) + 1;
28889 }
28890 return 1;
28891 }
28892
28893 /* A helper function for dwarf2out_finish called through
28894 htab_traverse. Output the indexed string. */
28895
28896 int
28897 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28898 {
28899 struct indirect_string_node *node = *h;
28900
28901 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28902 {
28903 /* Assert that the strings are output in the same order as their
28904 indexes were assigned. */
28905 gcc_assert (*cur_idx == node->index);
28906 assemble_string (node->str, strlen (node->str) + 1);
28907 *cur_idx += 1;
28908 }
28909 return 1;
28910 }
28911
28912 /* A helper function for output_indirect_strings. Counts the number
28913 of indexed string offsets. Must match the logic of the functions
28914 output_index_string[_offsets] above. */
28915 int
28916 count_index_strings (indirect_string_node **h, unsigned int *last_idx)
28917 {
28918 struct indirect_string_node *node = *h;
28919
28920 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28921 *last_idx += 1;
28922 return 1;
28923 }
28924
28925 /* A helper function for dwarf2out_finish called through
28926 htab_traverse. Emit one queued .debug_str string. */
28927
28928 int
28929 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28930 {
28931 struct indirect_string_node *node = *h;
28932
28933 node->form = find_string_form (node);
28934 if (node->form == form && node->refcount > 0)
28935 {
28936 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28937 assemble_string (node->str, strlen (node->str) + 1);
28938 }
28939
28940 return 1;
28941 }
28942
28943 /* Output the indexed string table. */
28944
28945 static void
28946 output_indirect_strings (void)
28947 {
28948 switch_to_section (debug_str_section);
28949 if (!dwarf_split_debug_info)
28950 debug_str_hash->traverse<enum dwarf_form,
28951 output_indirect_string> (DW_FORM_strp);
28952 else
28953 {
28954 unsigned int offset = 0;
28955 unsigned int cur_idx = 0;
28956
28957 if (skeleton_debug_str_hash)
28958 skeleton_debug_str_hash->traverse<enum dwarf_form,
28959 output_indirect_string> (DW_FORM_strp);
28960
28961 switch_to_section (debug_str_offsets_section);
28962 /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
28963 header. Note that we don't need to generate a label for the
28964 actual index table following the header here, because this is
28965 for the split dwarf case only. In a .dwo file there is only
28966 one string offsets table (and one debug info section). But
28967 if we were to start using string offset tables for the main (or
28968 skeleton) unit, we would have to add a DW_AT_str_offsets_base
28969 pointing to the actual index after the header. Split dwarf
28970 units will never have a string offsets base attribute. When
28971 a split unit is moved into a .dwp file the string offsets can
28972 be found through the .debug_cu_index section table. */
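/* Roughly, the DWARF5 unit header emitted below looks like:
   unit_length (4 bytes, or 12 with the 64-bit escape),
   version (2 bytes) = 5, padding (2 bytes) = 0,
   followed by one DWARF_OFFSET_SIZE offset per indexed string. */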
28973 if (dwarf_version >= 5)
28974 {
28975 unsigned int last_idx = 0;
28976 unsigned long str_offsets_length;
28977
28978 debug_str_hash->traverse_noresize
28979 <unsigned int *, count_index_strings> (&last_idx);
28980 str_offsets_length = last_idx * DWARF_OFFSET_SIZE + 4;
28981 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
28982 dw2_asm_output_data (4, 0xffffffff,
28983 "Escape value for 64-bit DWARF extension");
28984 dw2_asm_output_data (DWARF_OFFSET_SIZE, str_offsets_length,
28985 "Length of string offsets unit");
28986 dw2_asm_output_data (2, 5, "DWARF string offsets version");
28987 dw2_asm_output_data (2, 0, "Header zero padding");
28988 }
28989 debug_str_hash->traverse_noresize
28990 <unsigned int *, output_index_string_offset> (&offset);
28991 switch_to_section (debug_str_dwo_section);
28992 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
28993 (&cur_idx);
28994 }
28995 }
28996
28997 /* Callback for htab_traverse to assign an index to an entry in the
28998 table, and to write that entry to the .debug_addr section. */
28999
29000 int
29001 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
29002 {
29003 addr_table_entry *entry = *slot;
29004
29005 if (entry->refcount == 0)
29006 {
29007 gcc_assert (entry->index == NO_INDEX_ASSIGNED
29008 || entry->index == NOT_INDEXED);
29009 return 1;
29010 }
29011
29012 gcc_assert (entry->index == *cur_index);
29013 (*cur_index)++;
29014
29015 switch (entry->kind)
29016 {
29017 case ate_kind_rtx:
29018 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
29019 "0x%x", entry->index);
29020 break;
29021 case ate_kind_rtx_dtprel:
29022 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
29023 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
29024 DWARF2_ADDR_SIZE,
29025 entry->addr.rtl);
29026 fputc ('\n', asm_out_file);
29027 break;
29028 case ate_kind_label:
29029 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
29030 "0x%x", entry->index);
29031 break;
29032 default:
29033 gcc_unreachable ();
29034 }
29035 return 1;
29036 }
29037
29038 /* A helper function for dwarf2out_finish. Counts the number
29039 of indexed addresses. Must match the logic of the function
29040 output_addr_table_entry above. */
29041 int
29042 count_index_addrs (addr_table_entry **slot, unsigned int *last_idx)
29043 {
29044 addr_table_entry *entry = *slot;
29045
29046 if (entry->refcount > 0)
29047 *last_idx += 1;
29048 return 1;
29049 }
29050
29051 /* Produce the .debug_addr section. */
29052
29053 static void
29054 output_addr_table (void)
29055 {
29056 unsigned int index = 0;
29057 if (addr_index_table == NULL || addr_index_table->size () == 0)
29058 return;
29059
29060 switch_to_section (debug_addr_section);
29061 /* GNU DebugFission (https://gcc.gnu.org/wiki/DebugFission),
29062 which GCC uses to implement -gsplit-dwarf as a GNU extension to
29063 DWARF before DWARF5, didn't have a header for .debug_addr units.
29064 DWARF5 specifies a small header when address tables are used. */
29065 if (dwarf_version >= 5)
29066 {
29067 unsigned int last_idx = 0;
29068 unsigned long addrs_length;
29069
29070 addr_index_table->traverse_noresize
29071 <unsigned int *, count_index_addrs> (&last_idx);
29072 addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;
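/* The extra 4 bytes cover the 2-byte version, 1-byte address size and
   1-byte segment selector size emitted below; the length field itself
   is not included in the unit length.  */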
29073
29074 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
29075 dw2_asm_output_data (4, 0xffffffff,
29076 "Escape value for 64-bit DWARF extension");
29077 dw2_asm_output_data (DWARF_OFFSET_SIZE, addrs_length,
29078 "Length of Address Unit");
29079 dw2_asm_output_data (2, 5, "DWARF addr version");
29080 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
29081 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
29082 }
29083 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
29084
29085 addr_index_table
29086 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
29087 }
29088
29089 #if ENABLE_ASSERT_CHECKING
29090 /* Verify that all marks are clear. */
29091
29092 static void
29093 verify_marks_clear (dw_die_ref die)
29094 {
29095 dw_die_ref c;
29096
29097 gcc_assert (! die->die_mark);
29098 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
29099 }
29100 #endif /* ENABLE_ASSERT_CHECKING */
29101
29102 /* Clear the marks for a die and its children.
29103 Don't complain if the mark isn't already set. */
29104
29105 static void
29106 prune_unmark_dies (dw_die_ref die)
29107 {
29108 dw_die_ref c;
29109
29110 if (die->die_mark)
29111 die->die_mark = 0;
29112 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
29113 }
29114
29115 /* Given LOC that is referenced by a DIE we're marking as used, find all
29116 DWARF procedures it references and mark them as used. */
29117
29118 static void
29119 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
29120 {
29121 for (; loc != NULL; loc = loc->dw_loc_next)
29122 switch (loc->dw_loc_opc)
29123 {
29124 case DW_OP_implicit_pointer:
29125 case DW_OP_convert:
29126 case DW_OP_reinterpret:
29127 case DW_OP_GNU_implicit_pointer:
29128 case DW_OP_GNU_convert:
29129 case DW_OP_GNU_reinterpret:
29130 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
29131 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29132 break;
29133 case DW_OP_GNU_variable_value:
29134 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29135 {
29136 dw_die_ref ref
29137 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29138 if (ref == NULL)
29139 break;
29140 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29141 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29142 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29143 }
29144 /* FALLTHRU */
29145 case DW_OP_call2:
29146 case DW_OP_call4:
29147 case DW_OP_call_ref:
29148 case DW_OP_const_type:
29149 case DW_OP_GNU_const_type:
29150 case DW_OP_GNU_parameter_ref:
29151 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
29152 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29153 break;
29154 case DW_OP_regval_type:
29155 case DW_OP_deref_type:
29156 case DW_OP_GNU_regval_type:
29157 case DW_OP_GNU_deref_type:
29158 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
29159 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
29160 break;
29161 case DW_OP_entry_value:
29162 case DW_OP_GNU_entry_value:
29163 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
29164 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
29165 break;
29166 default:
29167 break;
29168 }
29169 }
29170
29171 /* Given DIE that we're marking as used, find any other dies
29172 it references as attributes and mark them as used. */
29173
29174 static void
29175 prune_unused_types_walk_attribs (dw_die_ref die)
29176 {
29177 dw_attr_node *a;
29178 unsigned ix;
29179
29180 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29181 {
29182 switch (AT_class (a))
29183 {
29184 /* Make sure DWARF procedures referenced by location descriptions will
29185 get emitted. */
29186 case dw_val_class_loc:
29187 prune_unused_types_walk_loc_descr (AT_loc (a));
29188 break;
29189 case dw_val_class_loc_list:
29190 for (dw_loc_list_ref list = AT_loc_list (a);
29191 list != NULL;
29192 list = list->dw_loc_next)
29193 prune_unused_types_walk_loc_descr (list->expr);
29194 break;
29195
29196 case dw_val_class_view_list:
29197 /* This points to a loc_list in another attribute, so it's
29198 already covered. */
29199 break;
29200
29201 case dw_val_class_die_ref:
29202 /* A reference to another DIE.
29203 Make sure that it will get emitted.
29204 If it was broken out into a comdat group, don't follow it. */
29205 if (! AT_ref (a)->comdat_type_p
29206 || a->dw_attr == DW_AT_specification)
29207 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
29208 break;
29209
29210 case dw_val_class_str:
29211 /* Set the string's refcount to 0 so that prune_unused_types_mark
29212 accounts properly for it. */
29213 a->dw_attr_val.v.val_str->refcount = 0;
29214 break;
29215
29216 default:
29217 break;
29218 }
29219 }
29220 }
29221
29222 /* Mark the children DIEs of DIE that describe its generic parameters and arguments. */
29223
29224 static void
29225 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29226 {
29227 dw_die_ref c;
29228
29229 if (die == NULL || die->die_child == NULL)
29230 return;
29231 c = die->die_child;
29232 do
29233 {
29234 if (is_template_parameter (c))
29235 prune_unused_types_mark (c, 1);
29236 c = c->die_sib;
29237 } while (c && c != die->die_child);
29238 }
29239
29240 /* Mark DIE as being used. If DOKIDS is true, then walk down
29241 to DIE's children. */
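/* die_mark acts as a tri-state flag here: 0 means the DIE has not been
   visited yet, 1 means it has been marked as used, and 2 means its
   children have been walked as well.  */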
29242
29243 static void
29244 prune_unused_types_mark (dw_die_ref die, int dokids)
29245 {
29246 dw_die_ref c;
29247
29248 if (die->die_mark == 0)
29249 {
29250 /* We haven't done this node yet. Mark it as used. */
29251 die->die_mark = 1;
29252 /* If this is the DIE of a generic type instantiation,
29253 mark the children DIEs that describe its generic parms and
29254 args. */
29255 prune_unused_types_mark_generic_parms_dies (die);
29256
29257 /* We also have to mark its parents as used.
29258 (But we don't want to mark our parent's kids due to this,
29259 unless it is a class.) */
29260 if (die->die_parent)
29261 prune_unused_types_mark (die->die_parent,
29262 class_scope_p (die->die_parent));
29263
29264 /* Mark any referenced nodes. */
29265 prune_unused_types_walk_attribs (die);
29266
29267 /* If this node is a specification,
29268 also mark the definition, if it exists. */
29269 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29270 prune_unused_types_mark (die->die_definition, 1);
29271 }
29272
29273 if (dokids && die->die_mark != 2)
29274 {
29275 /* We need to walk the children, but haven't done so yet.
29276 Remember that we've walked the kids. */
29277 die->die_mark = 2;
29278
29279 /* If this is an array type, we need to make sure our
29280 kids get marked, even if they're types. If we're
29281 breaking out types into comdat sections, do this
29282 for all type definitions. */
29283 if (die->die_tag == DW_TAG_array_type
29284 || (use_debug_types
29285 && is_type_die (die) && ! is_declaration_die (die)))
29286 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29287 else
29288 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29289 }
29290 }
29291
29292 /* For local classes, check whether any static member functions were emitted
29293 and, if so, mark them. */
29294
29295 static void
29296 prune_unused_types_walk_local_classes (dw_die_ref die)
29297 {
29298 dw_die_ref c;
29299
29300 if (die->die_mark == 2)
29301 return;
29302
29303 switch (die->die_tag)
29304 {
29305 case DW_TAG_structure_type:
29306 case DW_TAG_union_type:
29307 case DW_TAG_class_type:
29308 case DW_TAG_interface_type:
29309 break;
29310
29311 case DW_TAG_subprogram:
29312 if (!get_AT_flag (die, DW_AT_declaration)
29313 || die->die_definition != NULL)
29314 prune_unused_types_mark (die, 1);
29315 return;
29316
29317 default:
29318 return;
29319 }
29320
29321 /* Mark children. */
29322 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29323 }
29324
29325 /* Walk the tree DIE and mark types that we actually use. */
29326
29327 static void
29328 prune_unused_types_walk (dw_die_ref die)
29329 {
29330 dw_die_ref c;
29331
29332 /* Don't do anything if this node is already marked and
29333 children have been marked as well. */
29334 if (die->die_mark == 2)
29335 return;
29336
29337 switch (die->die_tag)
29338 {
29339 case DW_TAG_structure_type:
29340 case DW_TAG_union_type:
29341 case DW_TAG_class_type:
29342 case DW_TAG_interface_type:
29343 if (die->die_perennial_p)
29344 break;
29345
29346 for (c = die->die_parent; c; c = c->die_parent)
29347 if (c->die_tag == DW_TAG_subprogram)
29348 break;
29349
29350 /* Finding used static member functions inside of classes
29351 is needed just for local classes, because for other classes
29352 static member function DIEs with DW_AT_specification
29353 are emitted outside of the DW_TAG_*_type. If we ever change
29354 it, we'd need to call this even for non-local classes. */
29355 if (c)
29356 prune_unused_types_walk_local_classes (die);
29357
29358 /* It's a type node --- don't mark it. */
29359 return;
29360
29361 case DW_TAG_const_type:
29362 case DW_TAG_packed_type:
29363 case DW_TAG_pointer_type:
29364 case DW_TAG_reference_type:
29365 case DW_TAG_rvalue_reference_type:
29366 case DW_TAG_volatile_type:
29367 case DW_TAG_typedef:
29368 case DW_TAG_array_type:
29369 case DW_TAG_friend:
29370 case DW_TAG_enumeration_type:
29371 case DW_TAG_subroutine_type:
29372 case DW_TAG_string_type:
29373 case DW_TAG_set_type:
29374 case DW_TAG_subrange_type:
29375 case DW_TAG_ptr_to_member_type:
29376 case DW_TAG_file_type:
29377 /* Type nodes are useful only when other DIEs reference them --- don't
29378 mark them. */
29379 /* FALLTHROUGH */
29380
29381 case DW_TAG_dwarf_procedure:
29382 /* Likewise for DWARF procedures. */
29383
29384 if (die->die_perennial_p)
29385 break;
29386
29387 return;
29388
29389 case DW_TAG_variable:
29390 if (flag_debug_only_used_symbols)
29391 {
29392 if (die->die_perennial_p)
29393 break;
29394
29395 /* premark_used_variables marks external variables --- don't mark
29396 them here. But function-local externals are always considered
29397 used. */
29398 if (get_AT (die, DW_AT_external))
29399 {
29400 for (c = die->die_parent; c; c = c->die_parent)
29401 if (c->die_tag == DW_TAG_subprogram)
29402 break;
29403 if (!c)
29404 return;
29405 }
29406 }
29407 /* FALLTHROUGH */
29408
29409 default:
29410 /* Mark everything else. */
29411 break;
29412 }
29413
29414 if (die->die_mark == 0)
29415 {
29416 die->die_mark = 1;
29417
29418 /* Now, mark any dies referenced from here. */
29419 prune_unused_types_walk_attribs (die);
29420 }
29421
29422 die->die_mark = 2;
29423
29424 /* Mark children. */
29425 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29426 }
29427
29428 /* Increment the string counts on strings referred to from DIE's
29429 attributes. */
29430
29431 static void
29432 prune_unused_types_update_strings (dw_die_ref die)
29433 {
29434 dw_attr_node *a;
29435 unsigned ix;
29436
29437 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29438 if (AT_class (a) == dw_val_class_str)
29439 {
29440 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29441 s->refcount++;
29442 /* Put strings into the hash table only once they are used at least
29443 twice, or just once if the string section supports merging. */
29444 if (s->refcount
29445 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29446 {
29447 indirect_string_node **slot
29448 = debug_str_hash->find_slot_with_hash (s->str,
29449 htab_hash_string (s->str),
29450 INSERT);
29451 gcc_assert (*slot == NULL);
29452 *slot = s;
29453 }
29454 }
29455 }
29456
29457 /* Mark DIE and its children as removed. */
29458
29459 static void
29460 mark_removed (dw_die_ref die)
29461 {
29462 dw_die_ref c;
29463 die->removed = true;
29464 FOR_EACH_CHILD (die, c, mark_removed (c));
29465 }
29466
29467 /* Remove from the tree DIE any dies that aren't marked. */
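/* Note that in this file a DIE's children form a circular list linked
   through die_sib, with die_child pointing at the last child; the
   prev/c walk below relies on that representation.  */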
29468
29469 static void
29470 prune_unused_types_prune (dw_die_ref die)
29471 {
29472 dw_die_ref c;
29473
29474 gcc_assert (die->die_mark);
29475 prune_unused_types_update_strings (die);
29476
29477 if (! die->die_child)
29478 return;
29479
29480 c = die->die_child;
29481 do {
29482 dw_die_ref prev = c, next;
29483 for (c = c->die_sib; ! c->die_mark; c = next)
29484 if (c == die->die_child)
29485 {
29486 /* No marked children between 'prev' and the end of the list. */
29487 if (prev == c)
29488 /* No marked children at all. */
29489 die->die_child = NULL;
29490 else
29491 {
29492 prev->die_sib = c->die_sib;
29493 die->die_child = prev;
29494 }
29495 c->die_sib = NULL;
29496 mark_removed (c);
29497 return;
29498 }
29499 else
29500 {
29501 next = c->die_sib;
29502 c->die_sib = NULL;
29503 mark_removed (c);
29504 }
29505
29506 if (c != prev->die_sib)
29507 prev->die_sib = c;
29508 prune_unused_types_prune (c);
29509 } while (c != die->die_child);
29510 }
29511
29512 /* Remove dies representing declarations that we never use. */
29513
29514 static void
29515 prune_unused_types (void)
29516 {
29517 unsigned int i;
29518 limbo_die_node *node;
29519 comdat_type_node *ctnode;
29520 pubname_entry *pub;
29521 dw_die_ref base_type;
29522
29523 #if ENABLE_ASSERT_CHECKING
29524 /* All the marks should already be clear. */
29525 verify_marks_clear (comp_unit_die ());
29526 for (node = limbo_die_list; node; node = node->next)
29527 verify_marks_clear (node->die);
29528 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29529 verify_marks_clear (ctnode->root_die);
29530 #endif /* ENABLE_ASSERT_CHECKING */
29531
29532 /* Mark types that are used in global variables. */
29533 premark_types_used_by_global_vars ();
29534
29535 /* Mark variables used in the symtab. */
29536 if (flag_debug_only_used_symbols)
29537 premark_used_variables ();
29538
29539 /* Set the mark on nodes that are actually used. */
29540 prune_unused_types_walk (comp_unit_die ());
29541 for (node = limbo_die_list; node; node = node->next)
29542 prune_unused_types_walk (node->die);
29543 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29544 {
29545 prune_unused_types_walk (ctnode->root_die);
29546 prune_unused_types_mark (ctnode->type_die, 1);
29547 }
29548
29549 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29550 are unusual in that they are pubnames that are the children of pubtypes.
29551 They should only be marked via their parent DW_TAG_enumeration_type die,
29552 not as roots in themselves. */
29553 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29554 if (pub->die->die_tag != DW_TAG_enumerator)
29555 prune_unused_types_mark (pub->die, 1);
29556 for (i = 0; base_types.iterate (i, &base_type); i++)
29557 prune_unused_types_mark (base_type, 1);
29558
29559 /* Also set the mark on nodes that could be referenced by
29560 DW_TAG_call_site DW_AT_call_origin (i.e. direct call callees) or
29561 by DW_TAG_inlined_subroutine origins. */
29562 cgraph_node *cnode;
29563 FOR_EACH_FUNCTION (cnode)
29564 if (cnode->referred_to_p (false))
29565 {
29566 dw_die_ref die = lookup_decl_die (cnode->decl);
29567 if (die == NULL || die->die_mark)
29568 continue;
29569 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29570 if (e->caller != cnode)
29571 {
29572 prune_unused_types_mark (die, 1);
29573 break;
29574 }
29575 }
29576
29577 if (debug_str_hash)
29578 debug_str_hash->empty ();
29579 if (skeleton_debug_str_hash)
29580 skeleton_debug_str_hash->empty ();
29581 prune_unused_types_prune (comp_unit_die ());
29582 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29583 {
29584 node = *pnode;
29585 if (!node->die->die_mark)
29586 *pnode = node->next;
29587 else
29588 {
29589 prune_unused_types_prune (node->die);
29590 pnode = &node->next;
29591 }
29592 }
29593 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29594 prune_unused_types_prune (ctnode->root_die);
29595
29596 /* Leave the marks clear. */
29597 prune_unmark_dies (comp_unit_die ());
29598 for (node = limbo_die_list; node; node = node->next)
29599 prune_unmark_dies (node->die);
29600 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29601 prune_unmark_dies (ctnode->root_die);
29602 }
29603
29604 /* Helpers to manipulate hash table of comdat type units. */
29605
29606 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29607 {
29608 static inline hashval_t hash (const comdat_type_node *);
29609 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29610 };
29611
29612 inline hashval_t
29613 comdat_type_hasher::hash (const comdat_type_node *type_node)
29614 {
29615 hashval_t h;
29616 memcpy (&h, type_node->signature, sizeof (h));
29617 return h;
29618 }
29619
29620 inline bool
29621 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29622 const comdat_type_node *type_node_2)
29623 {
29624 return (! memcmp (type_node_1->signature, type_node_2->signature,
29625 DWARF_TYPE_SIGNATURE_SIZE));
29626 }
29627
29628 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to a dw_die_ref
29629 to the location where it would have been added had we known its
29630 DECL_ASSEMBLER_NAME when we added the other attributes. This will
29631 probably improve the compactness of the debug info by removing equivalent
29632 abbrevs, and hide any differences caused by deferring the
29633 computation of the assembler name, triggered by e.g. PCH. */
29634
29635 static inline void
29636 move_linkage_attr (dw_die_ref die)
29637 {
29638 unsigned ix = vec_safe_length (die->die_attr);
29639 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29640
29641 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29642 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29643
29644 while (--ix > 0)
29645 {
29646 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29647
29648 if (prev->dw_attr == DW_AT_decl_line
29649 || prev->dw_attr == DW_AT_decl_column
29650 || prev->dw_attr == DW_AT_name)
29651 break;
29652 }
29653
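/* Move the attribute only if its intended slot is not already the last
   position, which is where the just-added linkage name currently sits.  */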
29654 if (ix != vec_safe_length (die->die_attr) - 1)
29655 {
29656 die->die_attr->pop ();
29657 die->die_attr->quick_insert (ix, linkage);
29658 }
29659 }
29660
29661 /* Helper function for resolve_addr, mark DW_TAG_base_type nodes
29662 referenced from typed stack ops and count how often they are used. */
29663
29664 static void
29665 mark_base_types (dw_loc_descr_ref loc)
29666 {
29667 dw_die_ref base_type = NULL;
29668
29669 for (; loc; loc = loc->dw_loc_next)
29670 {
29671 switch (loc->dw_loc_opc)
29672 {
29673 case DW_OP_regval_type:
29674 case DW_OP_deref_type:
29675 case DW_OP_GNU_regval_type:
29676 case DW_OP_GNU_deref_type:
29677 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29678 break;
29679 case DW_OP_convert:
29680 case DW_OP_reinterpret:
29681 case DW_OP_GNU_convert:
29682 case DW_OP_GNU_reinterpret:
29683 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29684 continue;
29685 /* FALLTHRU */
29686 case DW_OP_const_type:
29687 case DW_OP_GNU_const_type:
29688 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29689 break;
29690 case DW_OP_entry_value:
29691 case DW_OP_GNU_entry_value:
29692 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29693 continue;
29694 default:
29695 continue;
29696 }
29697 gcc_assert (base_type->die_parent == comp_unit_die ());
29698 if (base_type->die_mark)
29699 base_type->die_mark++;
29700 else
29701 {
29702 base_types.safe_push (base_type);
29703 base_type->die_mark = 1;
29704 }
29705 }
29706 }
29707
29708 /* Comparison function for sorting marked base types. */
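/* The primary key is the usage count recorded in die_mark (descending);
   byte size, encoding and alignment are used as tie-breakers so the
   resulting order is deterministic.  */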
29709
29710 static int
29711 base_type_cmp (const void *x, const void *y)
29712 {
29713 dw_die_ref dx = *(const dw_die_ref *) x;
29714 dw_die_ref dy = *(const dw_die_ref *) y;
29715 unsigned int byte_size1, byte_size2;
29716 unsigned int encoding1, encoding2;
29717 unsigned int align1, align2;
29718 if (dx->die_mark > dy->die_mark)
29719 return -1;
29720 if (dx->die_mark < dy->die_mark)
29721 return 1;
29722 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29723 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29724 if (byte_size1 < byte_size2)
29725 return 1;
29726 if (byte_size1 > byte_size2)
29727 return -1;
29728 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29729 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29730 if (encoding1 < encoding2)
29731 return 1;
29732 if (encoding1 > encoding2)
29733 return -1;
29734 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29735 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29736 if (align1 < align2)
29737 return 1;
29738 if (align1 > align2)
29739 return -1;
29740 return 0;
29741 }
29742
29743 /* Move base types marked by mark_base_types as early as possible
29744 in the CU, sorted by decreasing usage count both to make the
29745 uleb128 references as small as possible and to make sure they
29746 will have die_offset already computed by calc_die_sizes when
29747 the sizes of typed stack loc ops are computed. */
29748
29749 static void
29750 move_marked_base_types (void)
29751 {
29752 unsigned int i;
29753 dw_die_ref base_type, die, c;
29754
29755 if (base_types.is_empty ())
29756 return;
29757
29758 /* Sort by decreasing usage count, they will be added again in that
29759 order later on. */
29760 base_types.qsort (base_type_cmp);
29761 die = comp_unit_die ();
29762 c = die->die_child;
29763 do
29764 {
29765 dw_die_ref prev = c;
29766 c = c->die_sib;
29767 while (c->die_mark)
29768 {
29769 remove_child_with_prev (c, prev);
29770 /* As base types got marked, there must be at least
29771 one node other than DW_TAG_base_type. */
29772 gcc_assert (die->die_child != NULL);
29773 c = prev->die_sib;
29774 }
29775 }
29776 while (c != die->die_child);
29777 gcc_assert (die->die_child);
29778 c = die->die_child;
29779 for (i = 0; base_types.iterate (i, &base_type); i++)
29780 {
29781 base_type->die_mark = 0;
29782 base_type->die_sib = c->die_sib;
29783 c->die_sib = base_type;
29784 c = base_type;
29785 }
29786 }
29787
29788 /* Helper function for resolve_addr: attempt to resolve
29789 one CONST_STRING and return true if successful. Similarly, verify that
29790 SYMBOL_REFs refer to variables emitted in the current CU. */
29791
29792 static bool
29793 resolve_one_addr (rtx *addr)
29794 {
29795 rtx rtl = *addr;
29796
29797 if (GET_CODE (rtl) == CONST_STRING)
29798 {
29799 size_t len = strlen (XSTR (rtl, 0)) + 1;
29800 tree t = build_string (len, XSTR (rtl, 0));
29801 tree tlen = size_int (len - 1);
29802 TREE_TYPE (t)
29803 = build_array_type (char_type_node, build_index_type (tlen));
29804 rtl = lookup_constant_def (t);
29805 if (!rtl || !MEM_P (rtl))
29806 return false;
29807 rtl = XEXP (rtl, 0);
29808 if (GET_CODE (rtl) == SYMBOL_REF
29809 && SYMBOL_REF_DECL (rtl)
29810 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29811 return false;
29812 vec_safe_push (used_rtx_array, rtl);
29813 *addr = rtl;
29814 return true;
29815 }
29816
29817 if (GET_CODE (rtl) == SYMBOL_REF
29818 && SYMBOL_REF_DECL (rtl))
29819 {
29820 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29821 {
29822 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29823 return false;
29824 }
29825 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29826 return false;
29827 }
29828
29829 if (GET_CODE (rtl) == CONST)
29830 {
29831 subrtx_ptr_iterator::array_type array;
29832 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29833 if (!resolve_one_addr (*iter))
29834 return false;
29835 }
29836
29837 return true;
29838 }
29839
29840 /* For STRING_CST, return SYMBOL_REF of its constant pool entry,
29841 if possible, and create DW_TAG_dwarf_procedure that can be referenced
29842 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
29843
29844 static rtx
29845 string_cst_pool_decl (tree t)
29846 {
29847 rtx rtl = output_constant_def (t, 1);
29848 unsigned char *array;
29849 dw_loc_descr_ref l;
29850 tree decl;
29851 size_t len;
29852 dw_die_ref ref;
29853
29854 if (!rtl || !MEM_P (rtl))
29855 return NULL_RTX;
29856 rtl = XEXP (rtl, 0);
29857 if (GET_CODE (rtl) != SYMBOL_REF
29858 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29859 return NULL_RTX;
29860
29861 decl = SYMBOL_REF_DECL (rtl);
29862 if (!lookup_decl_die (decl))
29863 {
29864 len = TREE_STRING_LENGTH (t);
29865 vec_safe_push (used_rtx_array, rtl);
29866 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29867 array = ggc_vec_alloc<unsigned char> (len);
29868 memcpy (array, TREE_STRING_POINTER (t), len);
29869 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29870 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29871 l->dw_loc_oprnd2.v.val_vec.length = len;
29872 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29873 l->dw_loc_oprnd2.v.val_vec.array = array;
29874 add_AT_loc (ref, DW_AT_location, l);
29875 equate_decl_number_to_die (decl, ref);
29876 }
29877 return rtl;
29878 }
29879
29880 /* Helper function of resolve_addr_in_expr. LOC is
29881 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29882 of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29883 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
29884 with DW_OP_implicit_pointer if possible
29885 and return true; if unsuccessful, return false. */
29886
29887 static bool
29888 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29889 {
29890 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29891 HOST_WIDE_INT offset = 0;
29892 dw_die_ref ref = NULL;
29893 tree decl;
29894
29895 if (GET_CODE (rtl) == CONST
29896 && GET_CODE (XEXP (rtl, 0)) == PLUS
29897 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29898 {
29899 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29900 rtl = XEXP (XEXP (rtl, 0), 0);
29901 }
29902 if (GET_CODE (rtl) == CONST_STRING)
29903 {
29904 size_t len = strlen (XSTR (rtl, 0)) + 1;
29905 tree t = build_string (len, XSTR (rtl, 0));
29906 tree tlen = size_int (len - 1);
29907
29908 TREE_TYPE (t)
29909 = build_array_type (char_type_node, build_index_type (tlen));
29910 rtl = string_cst_pool_decl (t);
29911 if (!rtl)
29912 return false;
29913 }
29914 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29915 {
29916 decl = SYMBOL_REF_DECL (rtl);
29917 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29918 {
29919 ref = lookup_decl_die (decl);
29920 if (ref && (get_AT (ref, DW_AT_location)
29921 || get_AT (ref, DW_AT_const_value)))
29922 {
29923 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29924 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29925 loc->dw_loc_oprnd1.val_entry = NULL;
29926 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29927 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29928 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29929 loc->dw_loc_oprnd2.v.val_int = offset;
29930 return true;
29931 }
29932 }
29933 }
29934 return false;
29935 }
29936
29937 /* Helper function for resolve_addr: handle one location
29938 expression and return false if at least one CONST_STRING or SYMBOL_REF in
29939 it couldn't be resolved. */
29940
29941 static bool
29942 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29943 {
29944 dw_loc_descr_ref keep = NULL;
29945 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29946 switch (loc->dw_loc_opc)
29947 {
29948 case DW_OP_addr:
29949 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29950 {
29951 if ((prev == NULL
29952 || prev->dw_loc_opc == DW_OP_piece
29953 || prev->dw_loc_opc == DW_OP_bit_piece)
29954 && loc->dw_loc_next
29955 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29956 && (!dwarf_strict || dwarf_version >= 5)
29957 && optimize_one_addr_into_implicit_ptr (loc))
29958 break;
29959 return false;
29960 }
29961 break;
29962 case DW_OP_GNU_addr_index:
29963 case DW_OP_addrx:
29964 case DW_OP_GNU_const_index:
29965 case DW_OP_constx:
29966 if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
29967 || loc->dw_loc_opc == DW_OP_addrx)
29968 || ((loc->dw_loc_opc == DW_OP_GNU_const_index
29969 || loc->dw_loc_opc == DW_OP_constx)
29970 && loc->dtprel))
29971 {
29972 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29973 if (!resolve_one_addr (&rtl))
29974 return false;
29975 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29976 loc->dw_loc_oprnd1.val_entry
29977 = add_addr_table_entry (rtl, ate_kind_rtx);
29978 }
29979 break;
29980 case DW_OP_const4u:
29981 case DW_OP_const8u:
29982 if (loc->dtprel
29983 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29984 return false;
29985 break;
29986 case DW_OP_plus_uconst:
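/* Replace DW_OP_plus_uconst <N> by a constant-pushing op followed by
   DW_OP_plus when that encoding is strictly smaller.  */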
29987 if (size_of_loc_descr (loc)
29988 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
29989 + 1
29990 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
29991 {
29992 dw_loc_descr_ref repl
29993 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
29994 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
29995 add_loc_descr (&repl, loc->dw_loc_next);
29996 *loc = *repl;
29997 }
29998 break;
29999 case DW_OP_implicit_value:
30000 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
30001 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
30002 return false;
30003 break;
30004 case DW_OP_implicit_pointer:
30005 case DW_OP_GNU_implicit_pointer:
30006 case DW_OP_GNU_parameter_ref:
30007 case DW_OP_GNU_variable_value:
30008 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30009 {
30010 dw_die_ref ref
30011 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
30012 if (ref == NULL)
30013 return false;
30014 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30015 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
30016 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
30017 }
30018 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
30019 {
30020 if (prev == NULL
30021 && loc->dw_loc_next == NULL
30022 && AT_class (a) == dw_val_class_loc)
30023 switch (a->dw_attr)
30024 {
30025 /* The following attributes allow both exprloc and reference classes,
30026 so if the whole expression is DW_OP_GNU_variable_value
30027 alone we can transform it into a reference. */
30028 case DW_AT_byte_size:
30029 case DW_AT_bit_size:
30030 case DW_AT_lower_bound:
30031 case DW_AT_upper_bound:
30032 case DW_AT_bit_stride:
30033 case DW_AT_count:
30034 case DW_AT_allocated:
30035 case DW_AT_associated:
30036 case DW_AT_byte_stride:
30037 a->dw_attr_val.val_class = dw_val_class_die_ref;
30038 a->dw_attr_val.val_entry = NULL;
30039 a->dw_attr_val.v.val_die_ref.die
30040 = loc->dw_loc_oprnd1.v.val_die_ref.die;
30041 a->dw_attr_val.v.val_die_ref.external = 0;
30042 return true;
30043 default:
30044 break;
30045 }
30046 if (dwarf_strict)
30047 return false;
30048 }
30049 break;
30050 case DW_OP_const_type:
30051 case DW_OP_regval_type:
30052 case DW_OP_deref_type:
30053 case DW_OP_convert:
30054 case DW_OP_reinterpret:
30055 case DW_OP_GNU_const_type:
30056 case DW_OP_GNU_regval_type:
30057 case DW_OP_GNU_deref_type:
30058 case DW_OP_GNU_convert:
30059 case DW_OP_GNU_reinterpret:
30060 while (loc->dw_loc_next
30061 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
30062 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
30063 {
30064 dw_die_ref base1, base2;
30065 unsigned enc1, enc2, size1, size2;
30066 if (loc->dw_loc_opc == DW_OP_regval_type
30067 || loc->dw_loc_opc == DW_OP_deref_type
30068 || loc->dw_loc_opc == DW_OP_GNU_regval_type
30069 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
30070 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
30071 else if (loc->dw_loc_oprnd1.val_class
30072 == dw_val_class_unsigned_const)
30073 break;
30074 else
30075 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
30076 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
30077 == dw_val_class_unsigned_const)
30078 break;
30079 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
30080 gcc_assert (base1->die_tag == DW_TAG_base_type
30081 && base2->die_tag == DW_TAG_base_type);
30082 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
30083 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
30084 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
30085 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
30086 if (size1 == size2
30087 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
30088 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
30089 && loc != keep)
30090 || enc1 == enc2))
30091 {
30092 /* Optimize away next DW_OP_convert after
30093 adjusting LOC's base type die reference. */
30094 if (loc->dw_loc_opc == DW_OP_regval_type
30095 || loc->dw_loc_opc == DW_OP_deref_type
30096 || loc->dw_loc_opc == DW_OP_GNU_regval_type
30097 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
30098 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
30099 else
30100 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
30101 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
30102 continue;
30103 }
30104 /* Don't change integer DW_OP_convert after e.g. floating
30105 point typed stack entry. */
30106 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
30107 keep = loc->dw_loc_next;
30108 break;
30109 }
30110 break;
30111 default:
30112 break;
30113 }
30114 return true;
30115 }
30116
30117 /* Helper function of resolve_addr. DIE had DW_AT_location of
30118 DW_OP_addr alone, which referred to DECL in DW_OP_addr's operand
30119 and DW_OP_addr couldn't be resolved. resolve_addr has already
30120 removed the DW_AT_location attribute. This function attempts to
30121 add a new DW_AT_location attribute with DW_OP_implicit_pointer
30122 to it, or a DW_AT_const_value attribute, if possible. */
30123
30124 static void
30125 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
30126 {
30127 if (!VAR_P (decl)
30128 || lookup_decl_die (decl) != die
30129 || DECL_EXTERNAL (decl)
30130 || !TREE_STATIC (decl)
30131 || DECL_INITIAL (decl) == NULL_TREE
30132 || DECL_P (DECL_INITIAL (decl))
30133 || get_AT (die, DW_AT_const_value))
30134 return;
30135
30136 tree init = DECL_INITIAL (decl);
30137 HOST_WIDE_INT offset = 0;
30138 /* For variables that have been optimized away and thus
30139 don't have a memory location, see if we can emit
30140 DW_AT_const_value instead. */
30141 if (tree_add_const_value_attribute (die, init))
30142 return;
30143 if (dwarf_strict && dwarf_version < 5)
30144 return;
30145 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
30146 and ADDR_EXPR refers to a decl that has DW_AT_location or
30147 DW_AT_const_value (but isn't addressable, otherwise
30148 resolving the original DW_OP_addr wouldn't fail), see if
30149 we can add DW_OP_implicit_pointer. */
30150 STRIP_NOPS (init);
30151 if (TREE_CODE (init) == POINTER_PLUS_EXPR
30152 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
30153 {
30154 offset = tree_to_shwi (TREE_OPERAND (init, 1));
30155 init = TREE_OPERAND (init, 0);
30156 STRIP_NOPS (init);
30157 }
30158 if (TREE_CODE (init) != ADDR_EXPR)
30159 return;
30160 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
30161 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
30162 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
30163 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
30164 && TREE_OPERAND (init, 0) != decl))
30165 {
30166 dw_die_ref ref;
30167 dw_loc_descr_ref l;
30168
30169 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
30170 {
30171 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
30172 if (!rtl)
30173 return;
30174 decl = SYMBOL_REF_DECL (rtl);
30175 }
30176 else
30177 decl = TREE_OPERAND (init, 0);
30178 ref = lookup_decl_die (decl);
30179 if (ref == NULL
30180 || (!get_AT (ref, DW_AT_location)
30181 && !get_AT (ref, DW_AT_const_value)))
30182 return;
30183 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
30184 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30185 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
30186 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30187 add_AT_loc (die, DW_AT_location, l);
30188 }
30189 }
30190
30191 /* Return NULL if L is a valid DWARF expression, or the first op that is
30192 not valid in a DWARF expression. */
30193
30194 static dw_loc_descr_ref
30195 non_dwarf_expression (dw_loc_descr_ref l)
30196 {
30197 while (l)
30198 {
30199 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30200 return l;
30201 switch (l->dw_loc_opc)
30202 {
30203 case DW_OP_regx:
30204 case DW_OP_implicit_value:
30205 case DW_OP_stack_value:
30206 case DW_OP_implicit_pointer:
30207 case DW_OP_GNU_implicit_pointer:
30208 case DW_OP_GNU_parameter_ref:
30209 case DW_OP_piece:
30210 case DW_OP_bit_piece:
30211 return l;
30212 default:
30213 break;
30214 }
30215 l = l->dw_loc_next;
30216 }
30217 return NULL;
30218 }
30219
30220 /* Return an adjusted copy of EXPR:
30221 If it is an empty DWARF expression, return it.
30222 If it is a valid non-empty DWARF expression,
30223 return a copy of EXPR with DW_OP_deref appended to it.
30224 If it is a DWARF expression followed by DW_OP_reg{N,x}, return
30225 a copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
30226 If it is a DWARF expression followed by DW_OP_stack_value, return
30227 a copy of the DWARF expression without anything appended.
30228 Otherwise, return NULL. */
30229
30230 static dw_loc_descr_ref
30231 copy_deref_exprloc (dw_loc_descr_ref expr)
30232 {
30233 dw_loc_descr_ref tail = NULL;
30234
30235 if (expr == NULL)
30236 return NULL;
30237
30238 dw_loc_descr_ref l = non_dwarf_expression (expr);
30239 if (l && l->dw_loc_next)
30240 return NULL;
30241
30242 if (l)
30243 {
30244 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30245 tail = new_loc_descr ((enum dwarf_location_atom)
30246 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30247 0, 0);
30248 else
30249 switch (l->dw_loc_opc)
30250 {
30251 case DW_OP_regx:
30252 tail = new_loc_descr (DW_OP_bregx,
30253 l->dw_loc_oprnd1.v.val_unsigned, 0);
30254 break;
30255 case DW_OP_stack_value:
30256 break;
30257 default:
30258 return NULL;
30259 }
30260 }
30261 else
30262 tail = new_loc_descr (DW_OP_deref, 0, 0);
30263
30264 dw_loc_descr_ref ret = NULL, *p = &ret;
30265 while (expr != l)
30266 {
30267 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30268 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30269 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30270 p = &(*p)->dw_loc_next;
30271 expr = expr->dw_loc_next;
30272 }
30273 *p = tail;
30274 return ret;
30275 }
30276
30277 /* For a DW_AT_string_length attribute with a DW_OP_GNU_variable_value
30278 reference to a variable or argument, adjust it if needed and return:
30279 -1 if the DW_AT_string_length attribute and, if present, the
30280 DW_AT_{string_length_,}byte_size attribute should be removed;
30281 0 if the attribute should be kept, perhaps with minor modifications, and no rescan is needed;
30282 1 if the attribute has been successfully adjusted. */
30283
30284 static int
30285 optimize_string_length (dw_attr_node *a)
30286 {
30287 dw_loc_descr_ref l = AT_loc (a), lv;
30288 dw_die_ref die;
30289 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30290 {
30291 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30292 die = lookup_decl_die (decl);
30293 if (die)
30294 {
30295 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30296 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30297 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30298 }
30299 else
30300 return -1;
30301 }
30302 else
30303 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30304
30305 /* DWARF5 allows reference class, so we can then reference the DIE.
30306 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30307 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30308 {
30309 a->dw_attr_val.val_class = dw_val_class_die_ref;
30310 a->dw_attr_val.val_entry = NULL;
30311 a->dw_attr_val.v.val_die_ref.die = die;
30312 a->dw_attr_val.v.val_die_ref.external = 0;
30313 return 0;
30314 }
30315
30316 dw_attr_node *av = get_AT (die, DW_AT_location);
30317 dw_loc_list_ref d;
30318 bool non_dwarf_expr = false;
30319
30320 if (av == NULL)
30321 return dwarf_strict ? -1 : 0;
30322 switch (AT_class (av))
30323 {
30324 case dw_val_class_loc_list:
30325 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30326 if (d->expr && non_dwarf_expression (d->expr))
30327 non_dwarf_expr = true;
30328 break;
30329 case dw_val_class_view_list:
30330 gcc_unreachable ();
30331 case dw_val_class_loc:
30332 lv = AT_loc (av);
30333 if (lv == NULL)
30334 return dwarf_strict ? -1 : 0;
30335 if (non_dwarf_expression (lv))
30336 non_dwarf_expr = true;
30337 break;
30338 default:
30339 return dwarf_strict ? -1 : 0;
30340 }
30341
30342 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30343 into DW_OP_call4 or DW_OP_GNU_variable_value into
30344 DW_OP_call4 DW_OP_deref, do so. */
30345 if (!non_dwarf_expr
30346 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30347 {
30348 l->dw_loc_opc = DW_OP_call4;
30349 if (l->dw_loc_next)
30350 l->dw_loc_next = NULL;
30351 else
30352 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30353 return 0;
30354 }
30355
30356 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30357 copy over the DW_AT_location attribute from die to a. */
30358 if (l->dw_loc_next != NULL)
30359 {
30360 a->dw_attr_val = av->dw_attr_val;
30361 return 1;
30362 }
30363
30364 dw_loc_list_ref list, *p;
30365 switch (AT_class (av))
30366 {
30367 case dw_val_class_loc_list:
30368 p = &list;
30369 list = NULL;
30370 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30371 {
30372 lv = copy_deref_exprloc (d->expr);
30373 if (lv)
30374 {
30375 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30376 p = &(*p)->dw_loc_next;
30377 }
30378 else if (!dwarf_strict && d->expr)
30379 return 0;
30380 }
30381 if (list == NULL)
30382 return dwarf_strict ? -1 : 0;
30383 a->dw_attr_val.val_class = dw_val_class_loc_list;
30384 gen_llsym (list);
30385 *AT_loc_list_ptr (a) = list;
30386 return 1;
30387 case dw_val_class_loc:
30388 lv = copy_deref_exprloc (AT_loc (av));
30389 if (lv == NULL)
30390 return dwarf_strict ? -1 : 0;
30391 a->dw_attr_val.v.val_loc = lv;
30392 return 1;
30393 default:
30394 gcc_unreachable ();
30395 }
30396 }
30397
30398 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30399 an address in the .rodata section if the string literal is emitted there;
30400 if it isn't found in .rodata, either remove the containing location list
30401 or replace DW_AT_const_value with DW_AT_location and an empty location
30402 expression. Similarly for SYMBOL_REFs, keep only those that refer
30403 to something that has been emitted in the current CU. */
30404
30405 static void
30406 resolve_addr (dw_die_ref die)
30407 {
30408 dw_die_ref c;
30409 dw_attr_node *a;
30410 dw_loc_list_ref *curr, *start, loc;
30411 unsigned ix;
30412 bool remove_AT_byte_size = false;
30413
30414 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30415 switch (AT_class (a))
30416 {
30417 case dw_val_class_loc_list:
30418 start = curr = AT_loc_list_ptr (a);
30419 loc = *curr;
30420 gcc_assert (loc);
30421 /* The same list can be referenced more than once. See if we have
30422 already recorded the result from a previous pass. */
30423 if (loc->replaced)
30424 *curr = loc->dw_loc_next;
30425 else if (!loc->resolved_addr)
30426 {
30427 /* As things stand, we do not expect or allow one die to
30428 reference a suffix of another die's location list chain.
30429 References must be identical or completely separate.
30430 There is therefore no need to cache the result of this
30431 pass on any list other than the first; doing so
30432 would lead to unnecessary writes. */
30433 while (*curr)
30434 {
30435 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30436 if (!resolve_addr_in_expr (a, (*curr)->expr))
30437 {
30438 dw_loc_list_ref next = (*curr)->dw_loc_next;
30439 dw_loc_descr_ref l = (*curr)->expr;
30440
30441 if (next && (*curr)->ll_symbol)
30442 {
30443 gcc_assert (!next->ll_symbol);
30444 next->ll_symbol = (*curr)->ll_symbol;
30445 next->vl_symbol = (*curr)->vl_symbol;
30446 }
30447 if (dwarf_split_debug_info)
30448 remove_loc_list_addr_table_entries (l);
30449 *curr = next;
30450 }
30451 else
30452 {
30453 mark_base_types ((*curr)->expr);
30454 curr = &(*curr)->dw_loc_next;
30455 }
30456 }
30457 if (loc == *start)
30458 loc->resolved_addr = 1;
30459 else
30460 {
30461 loc->replaced = 1;
30462 loc->dw_loc_next = *start;
30463 }
30464 }
30465 if (!*start)
30466 {
30467 remove_AT (die, a->dw_attr);
30468 ix--;
30469 }
30470 break;
30471 case dw_val_class_view_list:
30472 {
30473 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30474 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30475 dw_val_node *llnode
30476 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30477 /* If we no longer have a loclist, or it no longer needs
30478 views, drop this attribute. */
30479 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30480 {
30481 remove_AT (die, a->dw_attr);
30482 ix--;
30483 }
30484 break;
30485 }
30486 case dw_val_class_loc:
30487 {
30488 dw_loc_descr_ref l = AT_loc (a);
30489 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30490 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30491 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30492 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30493 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30494 with DW_FORM_ref referencing the same DIE as
30495 DW_OP_GNU_variable_value used to reference. */
30496 if (a->dw_attr == DW_AT_string_length
30497 && l
30498 && l->dw_loc_opc == DW_OP_GNU_variable_value
30499 && (l->dw_loc_next == NULL
30500 || (l->dw_loc_next->dw_loc_next == NULL
30501 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30502 {
30503 switch (optimize_string_length (a))
30504 {
30505 case -1:
30506 remove_AT (die, a->dw_attr);
30507 ix--;
30508 /* If we drop DW_AT_string_length, we need to drop also
30509 DW_AT_{string_length_,}byte_size. */
30510 remove_AT_byte_size = true;
30511 continue;
30512 default:
30513 break;
30514 case 1:
30515 /* Even if we keep the optimized DW_AT_string_length,
30516 it might have changed AT_class, so process it again. */
30517 ix--;
30518 continue;
30519 }
30520 }
30521 /* For -gdwarf-2 don't attempt to optimize
30522 DW_AT_data_member_location containing
30523 DW_OP_plus_uconst - older consumers might
30524 rely on it being that op instead of a more complex,
30525 but shorter, location description. */
30526 if ((dwarf_version > 2
30527 || a->dw_attr != DW_AT_data_member_location
30528 || l == NULL
30529 || l->dw_loc_opc != DW_OP_plus_uconst
30530 || l->dw_loc_next != NULL)
30531 && !resolve_addr_in_expr (a, l))
30532 {
30533 if (dwarf_split_debug_info)
30534 remove_loc_list_addr_table_entries (l);
30535 if (l != NULL
30536 && l->dw_loc_next == NULL
30537 && l->dw_loc_opc == DW_OP_addr
30538 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30539 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30540 && a->dw_attr == DW_AT_location)
30541 {
30542 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30543 remove_AT (die, a->dw_attr);
30544 ix--;
30545 optimize_location_into_implicit_ptr (die, decl);
30546 break;
30547 }
30548 if (a->dw_attr == DW_AT_string_length)
30549 /* If we drop DW_AT_string_length, we need to drop also
30550 DW_AT_{string_length_,}byte_size. */
30551 remove_AT_byte_size = true;
30552 remove_AT (die, a->dw_attr);
30553 ix--;
30554 }
30555 else
30556 mark_base_types (l);
30557 }
30558 break;
30559 case dw_val_class_addr:
30560 if (a->dw_attr == DW_AT_const_value
30561 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30562 {
30563 if (AT_index (a) != NOT_INDEXED)
30564 remove_addr_table_entry (a->dw_attr_val.val_entry);
30565 remove_AT (die, a->dw_attr);
30566 ix--;
30567 }
30568 if ((die->die_tag == DW_TAG_call_site
30569 && a->dw_attr == DW_AT_call_origin)
30570 || (die->die_tag == DW_TAG_GNU_call_site
30571 && a->dw_attr == DW_AT_abstract_origin))
30572 {
30573 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30574 dw_die_ref tdie = lookup_decl_die (tdecl);
30575 dw_die_ref cdie;
30576 if (tdie == NULL
30577 && DECL_EXTERNAL (tdecl)
30578 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30579 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30580 {
30581 dw_die_ref pdie = cdie;
30582 /* Make sure we don't add these DIEs into type units.
30583 We could emit skeleton DIEs for context (namespaces,
30584 outer structs/classes) and a skeleton DIE for the
30585 innermost context with DW_AT_signature pointing to the
30586 type unit. See PR78835. */
30587 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30588 pdie = pdie->die_parent;
30589 if (pdie == NULL)
30590 {
30591 /* Creating a full DIE for tdecl is overly expensive, and
30592 at this point even wrong in the LTO phase,
30593 as it can end up generating new type DIEs we didn't
30594 output, and optimize_external_refs would then crash. */
30595 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30596 add_AT_flag (tdie, DW_AT_external, 1);
30597 add_AT_flag (tdie, DW_AT_declaration, 1);
30598 add_linkage_attr (tdie, tdecl);
30599 add_name_and_src_coords_attributes (tdie, tdecl, true);
30600 equate_decl_number_to_die (tdecl, tdie);
30601 }
30602 }
30603 if (tdie)
30604 {
30605 a->dw_attr_val.val_class = dw_val_class_die_ref;
30606 a->dw_attr_val.v.val_die_ref.die = tdie;
30607 a->dw_attr_val.v.val_die_ref.external = 0;
30608 }
30609 else
30610 {
30611 if (AT_index (a) != NOT_INDEXED)
30612 remove_addr_table_entry (a->dw_attr_val.val_entry);
30613 remove_AT (die, a->dw_attr);
30614 ix--;
30615 }
30616 }
30617 break;
30618 default:
30619 break;
30620 }
30621
30622 if (remove_AT_byte_size)
30623 remove_AT (die, dwarf_version >= 5
30624 ? DW_AT_string_length_byte_size
30625 : DW_AT_byte_size);
30626
30627 FOR_EACH_CHILD (die, c, resolve_addr (c));
30628 }
30629 \f
30630 /* Helper routines for optimize_location_lists.
30631 This pass tries to share identical location lists in the .debug_loc
30632 section. */
30633
30634 /* Iteratively hash operands of LOC opcode into HSTATE. */
30635
30636 static void
30637 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30638 {
30639 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30640 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30641
30642 switch (loc->dw_loc_opc)
30643 {
30644 case DW_OP_const4u:
30645 case DW_OP_const8u:
30646 if (loc->dtprel)
30647 goto hash_addr;
30648 /* FALLTHRU */
30649 case DW_OP_const1u:
30650 case DW_OP_const1s:
30651 case DW_OP_const2u:
30652 case DW_OP_const2s:
30653 case DW_OP_const4s:
30654 case DW_OP_const8s:
30655 case DW_OP_constu:
30656 case DW_OP_consts:
30657 case DW_OP_pick:
30658 case DW_OP_plus_uconst:
30659 case DW_OP_breg0:
30660 case DW_OP_breg1:
30661 case DW_OP_breg2:
30662 case DW_OP_breg3:
30663 case DW_OP_breg4:
30664 case DW_OP_breg5:
30665 case DW_OP_breg6:
30666 case DW_OP_breg7:
30667 case DW_OP_breg8:
30668 case DW_OP_breg9:
30669 case DW_OP_breg10:
30670 case DW_OP_breg11:
30671 case DW_OP_breg12:
30672 case DW_OP_breg13:
30673 case DW_OP_breg14:
30674 case DW_OP_breg15:
30675 case DW_OP_breg16:
30676 case DW_OP_breg17:
30677 case DW_OP_breg18:
30678 case DW_OP_breg19:
30679 case DW_OP_breg20:
30680 case DW_OP_breg21:
30681 case DW_OP_breg22:
30682 case DW_OP_breg23:
30683 case DW_OP_breg24:
30684 case DW_OP_breg25:
30685 case DW_OP_breg26:
30686 case DW_OP_breg27:
30687 case DW_OP_breg28:
30688 case DW_OP_breg29:
30689 case DW_OP_breg30:
30690 case DW_OP_breg31:
30691 case DW_OP_regx:
30692 case DW_OP_fbreg:
30693 case DW_OP_piece:
30694 case DW_OP_deref_size:
30695 case DW_OP_xderef_size:
30696 hstate.add_object (val1->v.val_int);
30697 break;
30698 case DW_OP_skip:
30699 case DW_OP_bra:
30700 {
30701 int offset;
30702
30703 gcc_assert (val1->val_class == dw_val_class_loc);
30704 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30705 hstate.add_object (offset);
30706 }
30707 break;
30708 case DW_OP_implicit_value:
30709 hstate.add_object (val1->v.val_unsigned);
30710 switch (val2->val_class)
30711 {
30712 case dw_val_class_const:
30713 hstate.add_object (val2->v.val_int);
30714 break;
30715 case dw_val_class_vec:
30716 {
30717 unsigned int elt_size = val2->v.val_vec.elt_size;
30718 unsigned int len = val2->v.val_vec.length;
30719
30720 hstate.add_int (elt_size);
30721 hstate.add_int (len);
30722 hstate.add (val2->v.val_vec.array, len * elt_size);
30723 }
30724 break;
30725 case dw_val_class_const_double:
30726 hstate.add_object (val2->v.val_double.low);
30727 hstate.add_object (val2->v.val_double.high);
30728 break;
30729 case dw_val_class_wide_int:
30730 hstate.add (val2->v.val_wide->get_val (),
30731 get_full_len (*val2->v.val_wide)
30732 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30733 break;
30734 case dw_val_class_addr:
30735 inchash::add_rtx (val2->v.val_addr, hstate);
30736 break;
30737 default:
30738 gcc_unreachable ();
30739 }
30740 break;
30741 case DW_OP_bregx:
30742 case DW_OP_bit_piece:
30743 hstate.add_object (val1->v.val_int);
30744 hstate.add_object (val2->v.val_int);
30745 break;
30746 case DW_OP_addr:
30747 hash_addr:
30748 if (loc->dtprel)
30749 {
30750 unsigned char dtprel = 0xd1;
30751 hstate.add_object (dtprel);
30752 }
30753 inchash::add_rtx (val1->v.val_addr, hstate);
30754 break;
30755 case DW_OP_GNU_addr_index:
30756 case DW_OP_addrx:
30757 case DW_OP_GNU_const_index:
30758 case DW_OP_constx:
30759 {
30760 if (loc->dtprel)
30761 {
30762 unsigned char dtprel = 0xd1;
30763 hstate.add_object (dtprel);
30764 }
30765 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30766 }
30767 break;
30768 case DW_OP_implicit_pointer:
30769 case DW_OP_GNU_implicit_pointer:
30770 hstate.add_int (val2->v.val_int);
30771 break;
30772 case DW_OP_entry_value:
30773 case DW_OP_GNU_entry_value:
30774 hstate.add_object (val1->v.val_loc);
30775 break;
30776 case DW_OP_regval_type:
30777 case DW_OP_deref_type:
30778 case DW_OP_GNU_regval_type:
30779 case DW_OP_GNU_deref_type:
30780 {
30781 unsigned int byte_size
30782 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30783 unsigned int encoding
30784 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30785 hstate.add_object (val1->v.val_int);
30786 hstate.add_object (byte_size);
30787 hstate.add_object (encoding);
30788 }
30789 break;
30790 case DW_OP_convert:
30791 case DW_OP_reinterpret:
30792 case DW_OP_GNU_convert:
30793 case DW_OP_GNU_reinterpret:
30794 if (val1->val_class == dw_val_class_unsigned_const)
30795 {
30796 hstate.add_object (val1->v.val_unsigned);
30797 break;
30798 }
30799 /* FALLTHRU */
30800 case DW_OP_const_type:
30801 case DW_OP_GNU_const_type:
30802 {
30803 unsigned int byte_size
30804 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30805 unsigned int encoding
30806 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30807 hstate.add_object (byte_size);
30808 hstate.add_object (encoding);
30809 if (loc->dw_loc_opc != DW_OP_const_type
30810 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30811 break;
30812 hstate.add_object (val2->val_class);
30813 switch (val2->val_class)
30814 {
30815 case dw_val_class_const:
30816 hstate.add_object (val2->v.val_int);
30817 break;
30818 case dw_val_class_vec:
30819 {
30820 unsigned int elt_size = val2->v.val_vec.elt_size;
30821 unsigned int len = val2->v.val_vec.length;
30822
30823 hstate.add_object (elt_size);
30824 hstate.add_object (len);
30825 hstate.add (val2->v.val_vec.array, len * elt_size);
30826 }
30827 break;
30828 case dw_val_class_const_double:
30829 hstate.add_object (val2->v.val_double.low);
30830 hstate.add_object (val2->v.val_double.high);
30831 break;
30832 case dw_val_class_wide_int:
30833 hstate.add (val2->v.val_wide->get_val (),
30834 get_full_len (*val2->v.val_wide)
30835 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30836 break;
30837 default:
30838 gcc_unreachable ();
30839 }
30840 }
30841 break;
30842
30843 default:
30844 /* Other codes have no operands. */
30845 break;
30846 }
30847 }
30848
30849 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30850
30851 static inline void
30852 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30853 {
30854 dw_loc_descr_ref l;
30855 bool sizes_computed = false;
30856 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30857 size_of_locs (loc);
30858
30859 for (l = loc; l != NULL; l = l->dw_loc_next)
30860 {
30861 enum dwarf_location_atom opc = l->dw_loc_opc;
30862 hstate.add_object (opc);
30863 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30864 {
30865 size_of_locs (loc);
30866 sizes_computed = true;
30867 }
30868 hash_loc_operands (l, hstate);
30869 }
30870 }
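/* Illustrative sketch (not part of the compiler; added for exposition):
   for a one-element expression such as DW_OP_fbreg -16 the loop above
   effectively performs

     inchash::hash hstate;
     enum dwarf_location_atom opc = DW_OP_fbreg;
     HOST_WIDE_INT off = -16;
     hstate.add_object (opc);     <- the opcode
     hstate.add_object (off);     <- its dw_loc_oprnd1.v.val_int operand
     hashval_t h = hstate.end ();

   so structurally identical expressions hash identically, which is what
   the location list sharing below relies on.  */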
30871
30872 /* Compute hash of the whole location list LIST_HEAD. */
30873
30874 static inline void
30875 hash_loc_list (dw_loc_list_ref list_head)
30876 {
30877 dw_loc_list_ref curr = list_head;
30878 inchash::hash hstate;
30879
30880 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30881 {
30882 hstate.add (curr->begin, strlen (curr->begin) + 1);
30883 hstate.add (curr->end, strlen (curr->end) + 1);
30884 hstate.add_object (curr->vbegin);
30885 hstate.add_object (curr->vend);
30886 if (curr->section)
30887 hstate.add (curr->section, strlen (curr->section) + 1);
30888 hash_locs (curr->expr, hstate);
30889 }
30890 list_head->hash = hstate.end ();
30891 }
30892
30893 /* Return true if X and Y opcodes have the same operands. */
30894
30895 static inline bool
30896 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30897 {
30898 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30899 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30900 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30901 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30902
30903 switch (x->dw_loc_opc)
30904 {
30905 case DW_OP_const4u:
30906 case DW_OP_const8u:
30907 if (x->dtprel)
30908 goto hash_addr;
30909 /* FALLTHRU */
30910 case DW_OP_const1u:
30911 case DW_OP_const1s:
30912 case DW_OP_const2u:
30913 case DW_OP_const2s:
30914 case DW_OP_const4s:
30915 case DW_OP_const8s:
30916 case DW_OP_constu:
30917 case DW_OP_consts:
30918 case DW_OP_pick:
30919 case DW_OP_plus_uconst:
30920 case DW_OP_breg0:
30921 case DW_OP_breg1:
30922 case DW_OP_breg2:
30923 case DW_OP_breg3:
30924 case DW_OP_breg4:
30925 case DW_OP_breg5:
30926 case DW_OP_breg6:
30927 case DW_OP_breg7:
30928 case DW_OP_breg8:
30929 case DW_OP_breg9:
30930 case DW_OP_breg10:
30931 case DW_OP_breg11:
30932 case DW_OP_breg12:
30933 case DW_OP_breg13:
30934 case DW_OP_breg14:
30935 case DW_OP_breg15:
30936 case DW_OP_breg16:
30937 case DW_OP_breg17:
30938 case DW_OP_breg18:
30939 case DW_OP_breg19:
30940 case DW_OP_breg20:
30941 case DW_OP_breg21:
30942 case DW_OP_breg22:
30943 case DW_OP_breg23:
30944 case DW_OP_breg24:
30945 case DW_OP_breg25:
30946 case DW_OP_breg26:
30947 case DW_OP_breg27:
30948 case DW_OP_breg28:
30949 case DW_OP_breg29:
30950 case DW_OP_breg30:
30951 case DW_OP_breg31:
30952 case DW_OP_regx:
30953 case DW_OP_fbreg:
30954 case DW_OP_piece:
30955 case DW_OP_deref_size:
30956 case DW_OP_xderef_size:
30957 return valx1->v.val_int == valy1->v.val_int;
30958 case DW_OP_skip:
30959 case DW_OP_bra:
30960 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30961 can cause irrelevant differences in dw_loc_addr. */
30962 gcc_assert (valx1->val_class == dw_val_class_loc
30963 && valy1->val_class == dw_val_class_loc
30964 && (dwarf_split_debug_info
30965 || x->dw_loc_addr == y->dw_loc_addr));
30966 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30967 case DW_OP_implicit_value:
30968 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30969 || valx2->val_class != valy2->val_class)
30970 return false;
30971 switch (valx2->val_class)
30972 {
30973 case dw_val_class_const:
30974 return valx2->v.val_int == valy2->v.val_int;
30975 case dw_val_class_vec:
30976 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30977 && valx2->v.val_vec.length == valy2->v.val_vec.length
30978 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30979 valx2->v.val_vec.elt_size
30980 * valx2->v.val_vec.length) == 0;
30981 case dw_val_class_const_double:
30982 return valx2->v.val_double.low == valy2->v.val_double.low
30983 && valx2->v.val_double.high == valy2->v.val_double.high;
30984 case dw_val_class_wide_int:
30985 return *valx2->v.val_wide == *valy2->v.val_wide;
30986 case dw_val_class_addr:
30987 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
30988 default:
30989 gcc_unreachable ();
30990 }
30991 case DW_OP_bregx:
30992 case DW_OP_bit_piece:
30993 return valx1->v.val_int == valy1->v.val_int
30994 && valx2->v.val_int == valy2->v.val_int;
30995 case DW_OP_addr:
30996 hash_addr:
30997 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
30998 case DW_OP_GNU_addr_index:
30999 case DW_OP_addrx:
31000 case DW_OP_GNU_const_index:
31001 case DW_OP_constx:
31002 {
31003 rtx ax1 = valx1->val_entry->addr.rtl;
31004 rtx ay1 = valy1->val_entry->addr.rtl;
31005 return rtx_equal_p (ax1, ay1);
31006 }
31007 case DW_OP_implicit_pointer:
31008 case DW_OP_GNU_implicit_pointer:
31009 return valx1->val_class == dw_val_class_die_ref
31010 && valx1->val_class == valy1->val_class
31011 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
31012 && valx2->v.val_int == valy2->v.val_int;
31013 case DW_OP_entry_value:
31014 case DW_OP_GNU_entry_value:
31015 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
31016 case DW_OP_const_type:
31017 case DW_OP_GNU_const_type:
31018 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
31019 || valx2->val_class != valy2->val_class)
31020 return false;
31021 switch (valx2->val_class)
31022 {
31023 case dw_val_class_const:
31024 return valx2->v.val_int == valy2->v.val_int;
31025 case dw_val_class_vec:
31026 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
31027 && valx2->v.val_vec.length == valy2->v.val_vec.length
31028 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
31029 valx2->v.val_vec.elt_size
31030 * valx2->v.val_vec.length) == 0;
31031 case dw_val_class_const_double:
31032 return valx2->v.val_double.low == valy2->v.val_double.low
31033 && valx2->v.val_double.high == valy2->v.val_double.high;
31034 case dw_val_class_wide_int:
31035 return *valx2->v.val_wide == *valy2->v.val_wide;
31036 default:
31037 gcc_unreachable ();
31038 }
31039 case DW_OP_regval_type:
31040 case DW_OP_deref_type:
31041 case DW_OP_GNU_regval_type:
31042 case DW_OP_GNU_deref_type:
31043 return valx1->v.val_int == valy1->v.val_int
31044 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
31045 case DW_OP_convert:
31046 case DW_OP_reinterpret:
31047 case DW_OP_GNU_convert:
31048 case DW_OP_GNU_reinterpret:
31049 if (valx1->val_class != valy1->val_class)
31050 return false;
31051 if (valx1->val_class == dw_val_class_unsigned_const)
31052 return valx1->v.val_unsigned == valy1->v.val_unsigned;
31053 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
31054 case DW_OP_GNU_parameter_ref:
31055 return valx1->val_class == dw_val_class_die_ref
31056 && valx1->val_class == valy1->val_class
31057 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
31058 default:
31059 /* Other codes have no operands. */
31060 return true;
31061 }
31062 }
31063
31064 /* Return true if DWARF location expressions X and Y are the same. */
31065
31066 static inline bool
31067 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
31068 {
31069 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
31070 if (x->dw_loc_opc != y->dw_loc_opc
31071 || x->dtprel != y->dtprel
31072 || !compare_loc_operands (x, y))
31073 break;
31074 return x == NULL && y == NULL;
31075 }
31076
31077 /* Hashtable helpers. */
31078
31079 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
31080 {
31081 static inline hashval_t hash (const dw_loc_list_struct *);
31082 static inline bool equal (const dw_loc_list_struct *,
31083 const dw_loc_list_struct *);
31084 };
31085
31086 /* Return precomputed hash of location list X. */
31087
31088 inline hashval_t
31089 loc_list_hasher::hash (const dw_loc_list_struct *x)
31090 {
31091 return x->hash;
31092 }
31093
31094 /* Return true if location lists A and B are the same. */
31095
31096 inline bool
31097 loc_list_hasher::equal (const dw_loc_list_struct *a,
31098 const dw_loc_list_struct *b)
31099 {
31100 if (a == b)
31101 return 1;
31102 if (a->hash != b->hash)
31103 return 0;
31104 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
31105 if (strcmp (a->begin, b->begin) != 0
31106 || strcmp (a->end, b->end) != 0
31107 || (a->section == NULL) != (b->section == NULL)
31108 || (a->section && strcmp (a->section, b->section) != 0)
31109 || a->vbegin != b->vbegin || a->vend != b->vend
31110 || !compare_locs (a->expr, b->expr))
31111 break;
31112 return a == NULL && b == NULL;
31113 }
31114
31115 typedef hash_table<loc_list_hasher> loc_list_hash_type;
31116
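/* A minimal sketch (for exposition only) of the sharing scheme that
   optimize_location_lists_1 below implements; LIST stands for the
   location list referenced by some DIE attribute ATTR:

     loc_list_hash_type htab (500);
     hash_loc_list (list);                        <- fills list->hash
     dw_loc_list_struct **slot
       = htab.find_slot_with_hash (list, list->hash, INSERT);
     if (*slot == NULL)
       *slot = list;                              <- first occurrence, keep it
     else
       attr->dw_attr_val.v.val_loc_list = *slot;  <- duplicate, share it

   The real code additionally keeps the location-view bookkeeping
   (vl_symbol, DW_AT_GNU_locviews) consistent.  */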
31117
31118 /* Recursively optimize location lists referenced from DIE and
31119 its children, and share them whenever possible. */
31120
31121 static void
31122 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
31123 {
31124 dw_die_ref c;
31125 dw_attr_node *a;
31126 unsigned ix;
31127 dw_loc_list_struct **slot;
31128 bool drop_locviews = false;
31129 bool has_locviews = false;
31130
31131 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31132 if (AT_class (a) == dw_val_class_loc_list)
31133 {
31134 dw_loc_list_ref list = AT_loc_list (a);
31135 /* TODO: perform some optimizations here, before hashing
31136 it and storing it into the hash table. */
31137 hash_loc_list (list);
31138 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
31139 if (*slot == NULL)
31140 {
31141 *slot = list;
31142 if (loc_list_has_views (list))
31143 gcc_assert (list->vl_symbol);
31144 else if (list->vl_symbol)
31145 {
31146 drop_locviews = true;
31147 list->vl_symbol = NULL;
31148 }
31149 }
31150 else
31151 {
31152 if (list->vl_symbol && !(*slot)->vl_symbol)
31153 drop_locviews = true;
31154 a->dw_attr_val.v.val_loc_list = *slot;
31155 }
31156 }
31157 else if (AT_class (a) == dw_val_class_view_list)
31158 {
31159 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
31160 has_locviews = true;
31161 }
31162
31163
31164 if (drop_locviews && has_locviews)
31165 remove_AT (die, DW_AT_GNU_locviews);
31166
31167 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
31168 }
31169
31170
31171 /* Recursively assign each location list a unique index into the debug_addr
31172 section. */
31173
31174 static void
31175 index_location_lists (dw_die_ref die)
31176 {
31177 dw_die_ref c;
31178 dw_attr_node *a;
31179 unsigned ix;
31180
31181 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31182 if (AT_class (a) == dw_val_class_loc_list)
31183 {
31184 dw_loc_list_ref list = AT_loc_list (a);
31185 dw_loc_list_ref curr;
31186 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
31187 {
31188 /* Don't index an entry that has already been indexed
31189 or won't be output. Make sure skip_loc_list_entry doesn't
31190 call size_of_locs, because that might cause a circular
31191 dependency: index_location_lists would require the address
31192 table indexes to be computed, while adding new indexes through
31193 add_addr_table_entry and the address table index computation
31194 itself require that no new entries be added to the hash table.
31195 In the rare case of a DWARF[234] location expression >= 64KB,
31196 we'll just waste an unused address table entry for it. */
31197 if (curr->begin_entry != NULL
31198 || skip_loc_list_entry (curr))
31199 continue;
31200
31201 curr->begin_entry
31202 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
31203 }
31204 }
31205
31206 FOR_EACH_CHILD (die, c, index_location_lists (c));
31207 }
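/* Explanatory note (added): the effect of index_location_lists is that
   each list entry's begin label owns one ate_kind_label slot in the
   address table, so the split-DWARF location lists can later refer to
   that address by its index into .debug_addr instead of carrying an
   address-sized relocation of their own.  */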
31208
31209 /* Optimize location lists referenced from DIE and its
31210 children, and share them whenever possible. */
31211
31212 static void
31213 optimize_location_lists (dw_die_ref die)
31214 {
31215 loc_list_hash_type htab (500);
31216 optimize_location_lists_1 (die, &htab);
31217 }
31218 \f
31219 /* Traverse the limbo die list, and add parent/child links. The only
31220 dies without parents that should be here are concrete instances of
31221 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
31222 For concrete instances, we can get the parent die from the abstract
31223 instance. */
31224
31225 static void
31226 flush_limbo_die_list (void)
31227 {
31228 limbo_die_node *node;
31229
31230 /* get_context_die calls force_decl_die, which can put new DIEs on the
31231 limbo list in LTO mode when nested functions are put in a different
31232 partition than that of their parent function. */
31233 while ((node = limbo_die_list))
31234 {
31235 dw_die_ref die = node->die;
31236 limbo_die_list = node->next;
31237
31238 if (die->die_parent == NULL)
31239 {
31240 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
31241
31242 if (origin && origin->die_parent)
31243 add_child_die (origin->die_parent, die);
31244 else if (is_cu_die (die))
31245 ;
31246 else if (seen_error ())
31247 /* It's OK to be confused by errors in the input. */
31248 add_child_die (comp_unit_die (), die);
31249 else
31250 {
31251 /* In certain situations, the lexical block containing a
31252 nested function can be optimized away, which results
31253 in the nested function die being orphaned. Likewise
31254 with the return type of that nested function. Force
31255 this to be a child of the containing function.
31256
31257 It may happen that even the containing function got fully
31258 inlined and optimized out. In that case we are lost and attach
31259 the orphaned DIE to whatever context we can still determine.
31260 This should not be a big issue, as the function is likely unreachable too. */
31261 gcc_assert (node->created_for);
31262
31263 if (DECL_P (node->created_for))
31264 origin = get_context_die (DECL_CONTEXT (node->created_for));
31265 else if (TYPE_P (node->created_for))
31266 origin = scope_die_for (node->created_for, comp_unit_die ());
31267 else
31268 origin = comp_unit_die ();
31269
31270 add_child_die (origin, die);
31271 }
31272 }
31273 }
31274 }
31275
31276 /* Reset DIEs so we can output them again. */
31277
31278 static void
31279 reset_dies (dw_die_ref die)
31280 {
31281 dw_die_ref c;
31282
31283 /* Remove stuff we re-generate. */
31284 die->die_mark = 0;
31285 die->die_offset = 0;
31286 die->die_abbrev = 0;
31287 remove_AT (die, DW_AT_sibling);
31288
31289 FOR_EACH_CHILD (die, c, reset_dies (c));
31290 }
31291
31292 /* Output stuff that dwarf requires at the end of every file,
31293 and generate the DWARF-2 debugging info. */
31294
31295 static void
31296 dwarf2out_finish (const char *filename)
31297 {
31298 comdat_type_node *ctnode;
31299 dw_die_ref main_comp_unit_die;
31300 unsigned char checksum[16];
31301 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31302
31303 /* Flush out any latecomers to the limbo party. */
31304 flush_limbo_die_list ();
31305
31306 if (inline_entry_data_table)
31307 gcc_assert (inline_entry_data_table->is_empty ());
31308
31309 if (flag_checking)
31310 {
31311 verify_die (comp_unit_die ());
31312 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31313 verify_die (node->die);
31314 }
31315
31316 /* We shouldn't have any symbols with delayed asm names for
31317 DIEs generated after early finish. */
31318 gcc_assert (deferred_asm_name == NULL);
31319
31320 gen_remaining_tmpl_value_param_die_attribute ();
31321
31322 if (flag_generate_lto || flag_generate_offload)
31323 {
31324 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31325
31326 /* Prune stuff so that dwarf2out_finish runs successfully
31327 for the fat part of the object. */
31328 reset_dies (comp_unit_die ());
31329 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31330 reset_dies (node->die);
31331
31332 hash_table<comdat_type_hasher> comdat_type_table (100);
31333 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31334 {
31335 comdat_type_node **slot
31336 = comdat_type_table.find_slot (ctnode, INSERT);
31337
31338 /* Don't reset types twice. */
31339 if (*slot != HTAB_EMPTY_ENTRY)
31340 continue;
31341
31342 /* Remove the pointer to the line table. */
31343 remove_AT (ctnode->root_die, DW_AT_stmt_list);
31344
31345 if (debug_info_level >= DINFO_LEVEL_TERSE)
31346 reset_dies (ctnode->root_die);
31347
31348 *slot = ctnode;
31349 }
31350
31351 /* Reset die CU symbol so we don't output it twice. */
31352 comp_unit_die ()->die_id.die_symbol = NULL;
31353
31354 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31355 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31356 if (have_macinfo)
31357 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31358
31359 /* Remove indirect string decisions. */
31360 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31361 if (debug_line_str_hash)
31362 {
31363 debug_line_str_hash->traverse<void *, reset_indirect_string> (NULL);
31364 debug_line_str_hash = NULL;
31365 }
31366 }
31367
31368 #if ENABLE_ASSERT_CHECKING
31369 {
31370 dw_die_ref die = comp_unit_die (), c;
31371 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31372 }
31373 #endif
31374 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31375 resolve_addr (ctnode->root_die);
31376 resolve_addr (comp_unit_die ());
31377 move_marked_base_types ();
31378
31379 if (dump_file)
31380 {
31381 fprintf (dump_file, "DWARF for %s\n", filename);
31382 print_die (comp_unit_die (), dump_file);
31383 }
31384
31385 /* Initialize sections and labels used for actual assembler output. */
31386 unsigned generation = init_sections_and_labels (false);
31387
31388 /* Traverse the DIEs and add sibling attributes to those DIEs that
31389 have children. */
31390 add_sibling_attributes (comp_unit_die ());
31391 limbo_die_node *node;
31392 for (node = cu_die_list; node; node = node->next)
31393 add_sibling_attributes (node->die);
31394 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31395 add_sibling_attributes (ctnode->root_die);
31396
31397 /* When splitting DWARF info, we put some attributes in the
31398 skeleton compile_unit DIE that remains in the .o, while
31399 most attributes go in the DWO compile_unit_die. */
31400 if (dwarf_split_debug_info)
31401 {
31402 limbo_die_node *cu;
31403 main_comp_unit_die = gen_compile_unit_die (NULL);
31404 if (dwarf_version >= 5)
31405 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
31406 cu = limbo_die_list;
31407 gcc_assert (cu->die == main_comp_unit_die);
31408 limbo_die_list = limbo_die_list->next;
31409 cu->next = cu_die_list;
31410 cu_die_list = cu;
31411 }
31412 else
31413 main_comp_unit_die = comp_unit_die ();
31414
31415 /* Output a terminator label for the .text section. */
31416 switch_to_section (text_section);
31417 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31418 if (cold_text_section)
31419 {
31420 switch_to_section (cold_text_section);
31421 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31422 }
31423
31424 /* We can only use the low/high_pc attributes if all of the code was
31425 in .text. */
31426 if (!have_multiple_function_sections
31427 || (dwarf_version < 3 && dwarf_strict))
31428 {
31429 /* Don't add if the CU has no associated code. */
31430 if (text_section_used)
31431 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31432 text_end_label, true);
31433 }
31434 else
31435 {
31436 unsigned fde_idx;
31437 dw_fde_ref fde;
31438 bool range_list_added = false;
31439
31440 if (text_section_used)
31441 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31442 text_end_label, &range_list_added, true);
31443 if (cold_text_section_used)
31444 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31445 cold_end_label, &range_list_added, true);
31446
31447 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31448 {
31449 if (DECL_IGNORED_P (fde->decl))
31450 continue;
31451 if (!fde->in_std_section)
31452 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31453 fde->dw_fde_end, &range_list_added,
31454 true);
31455 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31456 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31457 fde->dw_fde_second_end, &range_list_added,
31458 true);
31459 }
31460
31461 if (range_list_added)
31462 {
31463 /* We need to give .debug_loc and .debug_ranges an appropriate
31464 "base address". Use zero so that these addresses become
31465 absolute. Historically, we've emitted the unexpected
31466 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31467 Emit both to give time for other tools to adapt. */
31468 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31469 if (! dwarf_strict && dwarf_version < 4)
31470 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31471
31472 add_ranges (NULL);
31473 }
31474 }
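  /* Worked example (illustrative; FOO_BEGIN/FOO_END are made-up labels):
     with DW_AT_low_pc 0 added above, a .debug_ranges pair emitted for
     (FOO_BEGIN, FOO_END) is read relative to a base address of zero,
     i.e. the relocated label values are taken as absolute addresses,
     which is what the entries from add_ranges_by_labels require.  */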
31475
31476 /* AIX Assembler inserts the length, so adjust the reference to match the
31477 offset expected by debuggers. */
31478 strcpy (dl_section_ref, debug_line_section_label);
31479 if (XCOFF_DEBUGGING_INFO)
31480 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31481
31482 if (debug_info_level >= DINFO_LEVEL_TERSE)
31483 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31484 dl_section_ref);
31485
31486 if (have_macinfo)
31487 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31488 macinfo_section_label);
31489
31490 if (dwarf_split_debug_info)
31491 {
31492 if (have_location_lists)
31493 {
31494 /* Since we generate the loclists in the split DWARF .dwo
31495 file itself, we don't need to generate a loclists_base
31496 attribute for the split compile unit DIE. That attribute
31497 (and using relocatable sec_offset FORMs) isn't allowed
31498 for a split compile unit. Only if the .debug_loclists
31499 section were in the main file would we need to generate a
31500 loclists_base attribute here (for the full or skeleton
31501 unit DIE). */
31502
31503 /* optimize_location_lists calculates the size of the lists,
31504 so index them first, and assign indices to the entries.
31505 Although optimize_location_lists will remove entries from
31506 the table, it only does so for duplicates, and therefore
31507 only reduces ref_counts to 1. */
31508 index_location_lists (comp_unit_die ());
31509 }
31510
31511 if (addr_index_table != NULL)
31512 {
31513 unsigned int index = 0;
31514 addr_index_table
31515 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31516 (&index);
31517 }
31518 }
31519
31520 loc_list_idx = 0;
31521 if (have_location_lists)
31522 {
31523 optimize_location_lists (comp_unit_die ());
31524 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31525 if (dwarf_version >= 5 && dwarf_split_debug_info)
31526 assign_location_list_indexes (comp_unit_die ());
31527 }
31528
31529 save_macinfo_strings ();
31530
31531 if (dwarf_split_debug_info)
31532 {
31533 unsigned int index = 0;
31534
31535 /* Add attributes common to skeleton compile_units and
31536 type_units. Because these attributes include strings, this
31537 must be done before freezing the string table. Top-level
31538 skeleton die attrs are added when the skeleton type unit is
31539 created, so ensure it is created by this point. */
31540 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31541 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31542 }
31543
31544 /* Output all of the compilation units. We put the main one last so that
31545 the offsets are available to output_pubnames. */
31546 for (node = cu_die_list; node; node = node->next)
31547 output_comp_unit (node->die, 0, NULL);
31548
31549 hash_table<comdat_type_hasher> comdat_type_table (100);
31550 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31551 {
31552 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31553
31554 /* Don't output duplicate types. */
31555 if (*slot != HTAB_EMPTY_ENTRY)
31556 continue;
31557
31558 /* Add a pointer to the line table for the main compilation unit
31559 so that the debugger can make sense of DW_AT_decl_file
31560 attributes. */
31561 if (debug_info_level >= DINFO_LEVEL_TERSE)
31562 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31563 (!dwarf_split_debug_info
31564 ? dl_section_ref
31565 : debug_skeleton_line_section_label));
31566
31567 output_comdat_type_unit (ctnode, false);
31568 *slot = ctnode;
31569 }
31570
31571 if (dwarf_split_debug_info)
31572 {
31573 int mark;
31574 struct md5_ctx ctx;
31575
31576 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31577 index_rnglists ();
31578
31579 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31580 md5_init_ctx (&ctx);
31581 mark = 0;
31582 die_checksum (comp_unit_die (), &ctx, &mark);
31583 unmark_all_dies (comp_unit_die ());
31584 md5_finish_ctx (&ctx, checksum);
31585
31586 if (dwarf_version < 5)
31587 {
31588 /* Use the first 8 bytes of the checksum as the dwo_id,
31589 and add it to both comp-unit DIEs. */
31590 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31591 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31592 }
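      /* For DWARF 5 the dwo_id is not an attribute but an 8-byte field in
         the skeleton and split compile unit headers, so the checksum is
         instead passed down to output_comp_unit and
         output_skeleton_debug_sections below.  */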
31593
31594 /* Add the base offset of the ranges table to the skeleton
31595 comp-unit DIE. */
31596 if (!vec_safe_is_empty (ranges_table))
31597 {
31598 if (dwarf_version >= 5)
31599 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31600 ranges_base_label);
31601 else
31602 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31603 ranges_section_label);
31604 }
31605
31606 output_addr_table ();
31607 }
31608
31609 /* Output the main compilation unit if non-empty or if .debug_macinfo
31610 or .debug_macro will be emitted. */
31611 output_comp_unit (comp_unit_die (), have_macinfo,
31612 dwarf_split_debug_info ? checksum : NULL);
31613
31614 if (dwarf_split_debug_info && info_section_emitted)
31615 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31616
31617 /* Output the abbreviation table. */
31618 if (vec_safe_length (abbrev_die_table) != 1)
31619 {
31620 switch_to_section (debug_abbrev_section);
31621 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31622 output_abbrev_section ();
31623 }
31624
31625 /* Output location list section if necessary. */
31626 if (have_location_lists)
31627 {
31628 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31629 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31630 /* Output the location lists info. */
31631 switch_to_section (debug_loc_section);
31632 if (dwarf_version >= 5)
31633 {
31634 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 2);
31635 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 3);
31636 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31637 dw2_asm_output_data (4, 0xffffffff,
31638 "Initial length escape value indicating "
31639 "64-bit DWARF extension");
31640 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31641 "Length of Location Lists");
31642 ASM_OUTPUT_LABEL (asm_out_file, l1);
31643 output_dwarf_version ();
31644 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31645 dw2_asm_output_data (1, 0, "Segment Size");
31646 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31647 "Offset Entry Count");
31648 }
31649 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31650 if (dwarf_version >= 5 && dwarf_split_debug_info)
31651 {
31652 unsigned int save_loc_list_idx = loc_list_idx;
31653 loc_list_idx = 0;
31654 output_loclists_offsets (comp_unit_die ());
31655 gcc_assert (save_loc_list_idx == loc_list_idx);
31656 }
31657 output_location_lists (comp_unit_die ());
31658 if (dwarf_version >= 5)
31659 ASM_OUTPUT_LABEL (asm_out_file, l2);
31660 }
31661
31662 output_pubtables ();
31663
31664 /* Output the address range information if a CU (.debug_info section)
31665 was emitted. We output an empty table even if we had no functions
31666 to put in it. This is because the consumer has no way to tell the
31667 difference between an empty table that we omitted and failure to
31668 generate a table that would have contained data. */
31669 if (info_section_emitted)
31670 {
31671 switch_to_section (debug_aranges_section);
31672 output_aranges ();
31673 }
31674
31675 /* Output ranges section if necessary. */
31676 if (!vec_safe_is_empty (ranges_table))
31677 {
31678 if (dwarf_version >= 5)
31679 output_rnglists (generation);
31680 else
31681 output_ranges ();
31682 }
31683
31684 /* Have to end the macro section. */
31685 if (have_macinfo)
31686 {
31687 switch_to_section (debug_macinfo_section);
31688 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31689 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31690 : debug_skeleton_line_section_label, false);
31691 dw2_asm_output_data (1, 0, "End compilation unit");
31692 }
31693
31694 /* Output the source line correspondence table. We must do this
31695 even if there is no line information. Otherwise, on an empty
31696 translation unit, we will generate a present, but empty,
31697 .debug_info section. IRIX 6.5 `nm' will then complain when
31698 examining the file. This is done late so that any filenames
31699 used by the debug_info section are marked as 'used'. */
31700 switch_to_section (debug_line_section);
31701 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31702 if (! output_asm_line_debug_info ())
31703 output_line_info (false);
31704
31705 if (dwarf_split_debug_info && info_section_emitted)
31706 {
31707 switch_to_section (debug_skeleton_line_section);
31708 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31709 output_line_info (true);
31710 }
31711
31712 /* If we emitted any indirect strings, output the string table too. */
31713 if (debug_str_hash || skeleton_debug_str_hash)
31714 output_indirect_strings ();
31715 if (debug_line_str_hash)
31716 {
31717 switch_to_section (debug_line_str_section);
31718 const enum dwarf_form form = DW_FORM_line_strp;
31719 debug_line_str_hash->traverse<enum dwarf_form,
31720 output_indirect_string> (form);
31721 }
31722
31723 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31724 symview_upper_bound = 0;
31725 if (zero_view_p)
31726 bitmap_clear (zero_view_p);
31727 }
31728
31729 /* Returns a hash value for X (which really is a variable_value_struct). */
31730
31731 inline hashval_t
31732 variable_value_hasher::hash (variable_value_struct *x)
31733 {
31734 return (hashval_t) x->decl_id;
31735 }
31736
31737 /* Return nonzero if decl_id of variable_value_struct X is the same as
31738 UID of decl Y. */
31739
31740 inline bool
31741 variable_value_hasher::equal (variable_value_struct *x, tree y)
31742 {
31743 return x->decl_id == DECL_UID (y);
31744 }
31745
31746 /* Helper function for resolve_variable_value, handle
31747 DW_OP_GNU_variable_value in one location expression.
31748 Return true if the exprloc has been changed into a loclist. */
31749
31750 static bool
31751 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31752 {
31753 dw_loc_descr_ref next;
31754 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31755 {
31756 next = loc->dw_loc_next;
31757 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31758 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31759 continue;
31760
31761 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31762 if (DECL_CONTEXT (decl) != current_function_decl)
31763 continue;
31764
31765 dw_die_ref ref = lookup_decl_die (decl);
31766 if (ref)
31767 {
31768 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31769 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31770 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31771 continue;
31772 }
31773 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31774 if (l == NULL)
31775 continue;
31776 if (l->dw_loc_next)
31777 {
31778 if (AT_class (a) != dw_val_class_loc)
31779 continue;
31780 switch (a->dw_attr)
31781 {
31782 /* The following attributes allow both exprloc and loclist
31783 classes, so we can change the attribute into a loclist. */
31784 case DW_AT_location:
31785 case DW_AT_string_length:
31786 case DW_AT_return_addr:
31787 case DW_AT_data_member_location:
31788 case DW_AT_frame_base:
31789 case DW_AT_segment:
31790 case DW_AT_static_link:
31791 case DW_AT_use_location:
31792 case DW_AT_vtable_elem_location:
31793 if (prev)
31794 {
31795 prev->dw_loc_next = NULL;
31796 prepend_loc_descr_to_each (l, AT_loc (a));
31797 }
31798 if (next)
31799 add_loc_descr_to_each (l, next);
31800 a->dw_attr_val.val_class = dw_val_class_loc_list;
31801 a->dw_attr_val.val_entry = NULL;
31802 a->dw_attr_val.v.val_loc_list = l;
31803 have_location_lists = true;
31804 return true;
31805 /* The following attributes allow both exprloc and reference,
31806 so if the whole expression is a single DW_OP_GNU_variable_value
31807 we can transform it into a reference. */
31808 case DW_AT_byte_size:
31809 case DW_AT_bit_size:
31810 case DW_AT_lower_bound:
31811 case DW_AT_upper_bound:
31812 case DW_AT_bit_stride:
31813 case DW_AT_count:
31814 case DW_AT_allocated:
31815 case DW_AT_associated:
31816 case DW_AT_byte_stride:
31817 if (prev == NULL && next == NULL)
31818 break;
31819 /* FALLTHRU */
31820 default:
31821 if (dwarf_strict)
31822 continue;
31823 break;
31824 }
31825 /* Create DW_TAG_variable that we can refer to. */
31826 gen_decl_die (decl, NULL_TREE, NULL,
31827 lookup_decl_die (current_function_decl));
31828 ref = lookup_decl_die (decl);
31829 if (ref)
31830 {
31831 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31832 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31833 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31834 }
31835 continue;
31836 }
31837 if (prev)
31838 {
31839 prev->dw_loc_next = l->expr;
31840 add_loc_descr (&prev->dw_loc_next, next);
31841 free_loc_descr (loc, NULL);
31842 next = prev->dw_loc_next;
31843 }
31844 else
31845 {
31846 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31847 add_loc_descr (&loc, next);
31848 next = loc;
31849 }
31850 loc = prev;
31851 }
31852 return false;
31853 }
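/* Worked example (illustrative; D.1234 is a made-up temporary): given

     DW_AT_location: DW_OP_GNU_variable_value <D.1234>

   for a temporary of the current function, the loop above first tries to
   turn the operand into a reference to D.1234's DIE.  Failing that it
   computes D.1234's own location: a single-entry location expression is
   spliced in place of the DW_OP_GNU_variable_value opcode, while a
   multi-entry location list converts the whole attribute from exprloc
   into loclist (the `return true' path).  */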
31854
31855 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31856
31857 static void
31858 resolve_variable_value (dw_die_ref die)
31859 {
31860 dw_attr_node *a;
31861 dw_loc_list_ref loc;
31862 unsigned ix;
31863
31864 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31865 switch (AT_class (a))
31866 {
31867 case dw_val_class_loc:
31868 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31869 break;
31870 /* FALLTHRU */
31871 case dw_val_class_loc_list:
31872 loc = AT_loc_list (a);
31873 gcc_assert (loc);
31874 for (; loc; loc = loc->dw_loc_next)
31875 resolve_variable_value_in_expr (a, loc->expr);
31876 break;
31877 default:
31878 break;
31879 }
31880 }
31881
31882 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31883 temporaries in the current function. */
31884
31885 static void
31886 resolve_variable_values (void)
31887 {
31888 if (!variable_value_hash || !current_function_decl)
31889 return;
31890
31891 struct variable_value_struct *node
31892 = variable_value_hash->find_with_hash (current_function_decl,
31893 DECL_UID (current_function_decl));
31894
31895 if (node == NULL)
31896 return;
31897
31898 unsigned int i;
31899 dw_die_ref die;
31900 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31901 resolve_variable_value (die);
31902 }
31903
31904 /* Helper function for note_variable_value, handle one location
31905 expression. */
31906
31907 static void
31908 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31909 {
31910 for (; loc; loc = loc->dw_loc_next)
31911 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31912 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31913 {
31914 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31915 dw_die_ref ref = lookup_decl_die (decl);
31916 if (! ref && (flag_generate_lto || flag_generate_offload))
31917 {
31918 /* ??? This is somewhat of a hack: we do not create DIEs early
31919 for variables that are not in BLOCK trees, but when generating
31920 early LTO output we need the dw_val_class_decl_ref to be
31921 fully resolved. For fat LTO objects we'd also like to
31922 undo this after LTO dwarf output. */
31923 gcc_assert (DECL_CONTEXT (decl));
31924 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31925 gcc_assert (ctx != NULL);
31926 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31927 ref = lookup_decl_die (decl);
31928 gcc_assert (ref != NULL);
31929 }
31930 if (ref)
31931 {
31932 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31933 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31934 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31935 continue;
31936 }
31937 if (VAR_P (decl)
31938 && DECL_CONTEXT (decl)
31939 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31940 && lookup_decl_die (DECL_CONTEXT (decl)))
31941 {
31942 if (!variable_value_hash)
31943 variable_value_hash
31944 = hash_table<variable_value_hasher>::create_ggc (10);
31945
31946 tree fndecl = DECL_CONTEXT (decl);
31947 struct variable_value_struct *node;
31948 struct variable_value_struct **slot
31949 = variable_value_hash->find_slot_with_hash (fndecl,
31950 DECL_UID (fndecl),
31951 INSERT);
31952 if (*slot == NULL)
31953 {
31954 node = ggc_cleared_alloc<variable_value_struct> ();
31955 node->decl_id = DECL_UID (fndecl);
31956 *slot = node;
31957 }
31958 else
31959 node = *slot;
31960
31961 vec_safe_push (node->dies, die);
31962 }
31963 }
31964 }
31965
31966 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
31967 carrying a dw_val_class_decl_ref operand. */
31968
31969 static void
31970 note_variable_value (dw_die_ref die)
31971 {
31972 dw_die_ref c;
31973 dw_attr_node *a;
31974 dw_loc_list_ref loc;
31975 unsigned ix;
31976
31977 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31978 switch (AT_class (a))
31979 {
31980 case dw_val_class_loc_list:
31981 loc = AT_loc_list (a);
31982 gcc_assert (loc);
31983 if (!loc->noted_variable_value)
31984 {
31985 loc->noted_variable_value = 1;
31986 for (; loc; loc = loc->dw_loc_next)
31987 note_variable_value_in_expr (die, loc->expr);
31988 }
31989 break;
31990 case dw_val_class_loc:
31991 note_variable_value_in_expr (die, AT_loc (a));
31992 break;
31993 default:
31994 break;
31995 }
31996
31997 /* Mark children. */
31998 FOR_EACH_CHILD (die, c, note_variable_value (c));
31999 }
32000
32001 /* Perform any cleanups needed after the early debug generation pass
32002 has run. */
32003
32004 static void
32005 dwarf2out_early_finish (const char *filename)
32006 {
32007 set_early_dwarf s;
32008 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
32009
32010 /* PCH might result in DW_AT_producer string being restored from the
32011 header compilation, so always fill it with an empty string initially
32012 and overwrite it only here. */
32013 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
32014 producer_string = gen_producer_string ();
32015 producer->dw_attr_val.v.val_str->refcount--;
32016 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
32017
32018 /* Add the name for the main input file now. We delayed this from
32019 dwarf2out_init to avoid complications with PCH. */
32020 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
32021 add_comp_dir_attribute (comp_unit_die ());
32022
32023 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
32024 DW_AT_comp_dir into .debug_line_str section. */
32025 if (!output_asm_line_debug_info ()
32026 && dwarf_version >= 5
32027 && DWARF5_USE_DEBUG_LINE_STR)
32028 {
32029 for (int i = 0; i < 2; i++)
32030 {
32031 dw_attr_node *a = get_AT (comp_unit_die (),
32032 i ? DW_AT_comp_dir : DW_AT_name);
32033 if (a == NULL
32034 || AT_class (a) != dw_val_class_str
32035 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
32036 continue;
32037
32038 if (! debug_line_str_hash)
32039 debug_line_str_hash
32040 = hash_table<indirect_string_hasher>::create_ggc (10);
32041
32042 struct indirect_string_node *node
32043 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
32044 set_indirect_string (node);
32045 node->form = DW_FORM_line_strp;
32046 a->dw_attr_val.v.val_str->refcount--;
32047 a->dw_attr_val.v.val_str = node;
32048 }
32049 }
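  /* Worked example (added for exposition): with 4-byte DWARF offsets
     (DWARF_OFFSET_SIZE == 4) a DW_AT_name of "abc" occupies 4 bytes
     including the terminating NUL, so a DW_FORM_line_strp offset would
     save nothing and the attribute is left alone; "abcde" (6 bytes) is
     moved into .debug_line_str instead.  */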
32050
32051 /* With LTO early dwarf was really finished at compile-time, so make
32052 sure to adjust the phase after annotating the LTRANS CU DIE. */
32053 if (in_lto_p)
32054 {
32055 early_dwarf_finished = true;
32056 if (dump_file)
32057 {
32058 fprintf (dump_file, "LTO EARLY DWARF for %s\n", filename);
32059 print_die (comp_unit_die (), dump_file);
32060 }
32061 return;
32062 }
32063
32064 /* Walk through the list of incomplete types again, trying once more to
32065 emit full debugging info for them. */
32066 retry_incomplete_types ();
32067
32068 /* The point here is to flush out the limbo list so that it is empty
32069 and we don't need to stream it for LTO. */
32070 flush_limbo_die_list ();
32071
32072 gen_scheduled_generic_parms_dies ();
32073 gen_remaining_tmpl_value_param_die_attribute ();
32074
32075 /* Add DW_AT_linkage_name for all deferred DIEs. */
32076 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
32077 {
32078 tree decl = node->created_for;
32079 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
32080 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
32081 ended up in deferred_asm_name before we knew it was
32082 constant and never written to disk. */
32083 && DECL_ASSEMBLER_NAME (decl))
32084 {
32085 add_linkage_attr (node->die, decl);
32086 move_linkage_attr (node->die);
32087 }
32088 }
32089 deferred_asm_name = NULL;
32090
32091 if (flag_eliminate_unused_debug_types)
32092 prune_unused_types ();
32093
32094 /* Generate separate COMDAT sections for type DIEs. */
32095 if (use_debug_types)
32096 {
32097 break_out_comdat_types (comp_unit_die ());
32098
32099 /* Each new type_unit DIE was added to the limbo die list when created.
32100 Since these have all been added to comdat_type_list, clear the
32101 limbo die list. */
32102 limbo_die_list = NULL;
32103
32104 /* For each new comdat type unit, copy declarations for incomplete
32105 types to make the new unit self-contained (i.e., no direct
32106 references to the main compile unit). */
32107 for (comdat_type_node *ctnode = comdat_type_list;
32108 ctnode != NULL; ctnode = ctnode->next)
32109 copy_decls_for_unworthy_types (ctnode->root_die);
32110 copy_decls_for_unworthy_types (comp_unit_die ());
32111
32112 /* In the process of copying declarations from one unit to another,
32113 we may have left some declarations behind that are no longer
32114 referenced. Prune them. */
32115 prune_unused_types ();
32116 }
32117
32118 /* Traverse the DIEs and note DIEs with DW_OP_GNU_variable_value still
32119 carrying a dw_val_class_decl_ref operand. */
32120 note_variable_value (comp_unit_die ());
32121 for (limbo_die_node *node = cu_die_list; node; node = node->next)
32122 note_variable_value (node->die);
32123 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
32124 ctnode = ctnode->next)
32125 note_variable_value (ctnode->root_die);
32126 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32127 note_variable_value (node->die);
32128
32129 /* The AT_pubnames attribute needs to go in all skeleton dies, including
32130 both the main_cu and all skeleton TUs. Making this call unconditional
32131 would end up either adding a second copy of the AT_pubnames attribute, or
32132 requiring a special case in add_top_level_skeleton_die_attrs. */
32133 if (!dwarf_split_debug_info)
32134 add_AT_pubnames (comp_unit_die ());
32135
32136 /* The early debug phase is now finished. */
32137 early_dwarf_finished = true;
32138 if (dump_file)
32139 {
32140 fprintf (dump_file, "EARLY DWARF for %s\n", filename);
32141 print_die (comp_unit_die (), dump_file);
32142 }
32143
32144 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
32145 if ((!flag_generate_lto && !flag_generate_offload)
32146 /* FIXME: Disable debug info generation for (PE-)COFF targets since the
32147 copy_lto_debug_sections operation of the simple object support in
32148 libiberty is not implemented for them yet. */
32149 || TARGET_PECOFF || TARGET_COFF)
32150 return;
32151
32152 /* Now that we are going to output for LTO, initialize sections and
32153 labels to the LTO variants. We don't need a random-seed postfix as
32154 other LTO sections do, since linking the LTO debug sections into one
32155 in a partial link is fine. */
32156 init_sections_and_labels (true);
32157
32158 /* The output below is modeled after dwarf2out_finish with all
32159 location related output removed and some LTO specific changes.
32160 Some refactoring might make both smaller and easier to match up. */
32161
32162 /* Traverse the DIEs and add sibling attributes to those DIEs
32163 that have children. */
32164 add_sibling_attributes (comp_unit_die ());
32165 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32166 add_sibling_attributes (node->die);
32167 for (comdat_type_node *ctnode = comdat_type_list;
32168 ctnode != NULL; ctnode = ctnode->next)
32169 add_sibling_attributes (ctnode->root_die);
32170
32171 /* AIX Assembler inserts the length, so adjust the reference to match the
32172 offset expected by debuggers. */
32173 strcpy (dl_section_ref, debug_line_section_label);
32174 if (XCOFF_DEBUGGING_INFO)
32175 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
32176
32177 if (debug_info_level >= DINFO_LEVEL_TERSE)
32178 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
32179
32180 if (have_macinfo)
32181 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
32182 macinfo_section_label);
32183
32184 save_macinfo_strings ();
32185
32186 if (dwarf_split_debug_info)
32187 {
32188 unsigned int index = 0;
32189 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
32190 }
32191
32192 /* Output all of the compilation units. We put the main one last so that
32193 the offsets are available to output_pubnames. */
32194 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32195 output_comp_unit (node->die, 0, NULL);
32196
32197 hash_table<comdat_type_hasher> comdat_type_table (100);
32198 for (comdat_type_node *ctnode = comdat_type_list;
32199 ctnode != NULL; ctnode = ctnode->next)
32200 {
32201 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
32202
32203 /* Don't output duplicate types. */
32204 if (*slot != HTAB_EMPTY_ENTRY)
32205 continue;
32206
32207 /* Add a pointer to the line table for the main compilation unit
32208 so that the debugger can make sense of DW_AT_decl_file
32209 attributes. */
32210 if (debug_info_level >= DINFO_LEVEL_TERSE)
32211 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
32212 (!dwarf_split_debug_info
32213 ? debug_line_section_label
32214 : debug_skeleton_line_section_label));
32215
32216 output_comdat_type_unit (ctnode, true);
32217 *slot = ctnode;
32218 }
32219
32220 /* Stick a unique symbol to the main debuginfo section. */
32221 compute_comp_unit_symbol (comp_unit_die ());
32222
32223 /* Output the main compilation unit. We always need it if only for
32224 the CU symbol. */
32225 output_comp_unit (comp_unit_die (), true, NULL);
32226
32227 /* Output the abbreviation table. */
32228 if (vec_safe_length (abbrev_die_table) != 1)
32229 {
32230 switch_to_section (debug_abbrev_section);
32231 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
32232 output_abbrev_section ();
32233 }
32234
32235 /* Have to end the macro section. */
32236 if (have_macinfo)
32237 {
32238 /* We have to save macinfo state if we need to output it again
32239 for the FAT part of the object. */
32240 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
32241 if (flag_fat_lto_objects)
32242 macinfo_table = macinfo_table->copy ();
32243
32244 switch_to_section (debug_macinfo_section);
32245 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
32246 output_macinfo (debug_line_section_label, true);
32247 dw2_asm_output_data (1, 0, "End compilation unit");
32248
32249 if (flag_fat_lto_objects)
32250 {
32251 vec_free (macinfo_table);
32252 macinfo_table = saved_macinfo_table;
32253 }
32254 }
32255
32256 /* Emit a skeleton debug_line section. */
32257 switch_to_section (debug_line_section);
32258 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32259 output_line_info (true);
32260
32261 /* If we emitted any indirect strings, output the string table too. */
32262 if (debug_str_hash || skeleton_debug_str_hash)
32263 output_indirect_strings ();
32264 if (debug_line_str_hash)
32265 {
32266 switch_to_section (debug_line_str_section);
32267 const enum dwarf_form form = DW_FORM_line_strp;
32268 debug_line_str_hash->traverse<enum dwarf_form,
32269 output_indirect_string> (form);
32270 }
32271
32272 /* Switch back to the text section. */
32273 switch_to_section (text_section);
32274 }
32275
32276 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32277 within the same process. For use by toplev::finalize. */
32278
32279 void
32280 dwarf2out_c_finalize (void)
32281 {
32282 last_var_location_insn = NULL;
32283 cached_next_real_insn = NULL;
32284 used_rtx_array = NULL;
32285 incomplete_types = NULL;
32286 debug_info_section = NULL;
32287 debug_skeleton_info_section = NULL;
32288 debug_abbrev_section = NULL;
32289 debug_skeleton_abbrev_section = NULL;
32290 debug_aranges_section = NULL;
32291 debug_addr_section = NULL;
32292 debug_macinfo_section = NULL;
32293 debug_line_section = NULL;
32294 debug_skeleton_line_section = NULL;
32295 debug_loc_section = NULL;
32296 debug_pubnames_section = NULL;
32297 debug_pubtypes_section = NULL;
32298 debug_str_section = NULL;
32299 debug_line_str_section = NULL;
32300 debug_str_dwo_section = NULL;
32301 debug_str_offsets_section = NULL;
32302 debug_ranges_section = NULL;
32303 debug_frame_section = NULL;
32304 fde_vec = NULL;
32305 debug_str_hash = NULL;
32306 debug_line_str_hash = NULL;
32307 skeleton_debug_str_hash = NULL;
32308 dw2_string_counter = 0;
32309 have_multiple_function_sections = false;
32310 text_section_used = false;
32311 cold_text_section_used = false;
32312 cold_text_section = NULL;
32313 current_unit_personality = NULL;
32314
32315 early_dwarf = false;
32316 early_dwarf_finished = false;
32317
32318 next_die_offset = 0;
32319 single_comp_unit_die = NULL;
32320 comdat_type_list = NULL;
32321 limbo_die_list = NULL;
32322 file_table = NULL;
32323 decl_die_table = NULL;
32324 common_block_die_table = NULL;
32325 decl_loc_table = NULL;
32326 call_arg_locations = NULL;
32327 call_arg_loc_last = NULL;
32328 call_site_count = -1;
32329 tail_call_site_count = -1;
32330 cached_dw_loc_list_table = NULL;
32331 abbrev_die_table = NULL;
32332 delete dwarf_proc_stack_usage_map;
32333 dwarf_proc_stack_usage_map = NULL;
32334 line_info_label_num = 0;
32335 cur_line_info_table = NULL;
32336 text_section_line_info = NULL;
32337 cold_text_section_line_info = NULL;
32338 separate_line_info = NULL;
32339 info_section_emitted = false;
32340 pubname_table = NULL;
32341 pubtype_table = NULL;
32342 macinfo_table = NULL;
32343 ranges_table = NULL;
32344 ranges_by_label = NULL;
32345 rnglist_idx = 0;
32346 have_location_lists = false;
32347 loclabel_num = 0;
32348 poc_label_num = 0;
32349 last_emitted_file = NULL;
32350 label_num = 0;
32351 tmpl_value_parm_die_table = NULL;
32352 generic_type_instances = NULL;
32353 frame_pointer_fb_offset = 0;
32354 frame_pointer_fb_offset_valid = false;
32355 base_types.release ();
32356 XDELETEVEC (producer_string);
32357 producer_string = NULL;
32358 }
32359
32360 #include "gt-dwarf2out.h"