1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2018 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47          information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
57
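/* Illustrative example (not part of the original glossary): on x86_64, a
   typical prologue moves the CFA rule as follows, assuming the usual
   "push %rbp; mov %rsp, %rbp" sequence:

       at the function entry point      CFA = %rsp + 8
       after  push %rbp                 CFA = %rsp + 16
       after  mov  %rsp, %rbp           CFA = %rbp + 16

   Each change is recorded as a CFI in the function's FDE, and all FDEs of
   a translation unit typically share a single CIE.  */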
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105
106 #ifndef XCOFF_DEBUGGING_INFO
107 #define XCOFF_DEBUGGING_INFO 0
108 #endif
109
110 #ifndef HAVE_XCOFF_DWARF_EXTRAS
111 #define HAVE_XCOFF_DWARF_EXTRAS 0
112 #endif
113
114 #ifdef VMS_DEBUGGING_INFO
115 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
116
117 /* Define this macro to be a nonzero value if the directory specifications
118 which are output in the debug info should end with a separator. */
119 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
120 /* Define this macro to evaluate to a nonzero value if GCC should refrain
121 from generating indirect strings in DWARF2 debug information, for instance
122 if your target is stuck with an old version of GDB that is unable to
123 process them properly or uses VMS Debug. */
124 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
125 #else
126 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
127 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
128 #endif
129
130 /* ??? Poison these here until it can be done generically. They've been
131 totally replaced in this file; make sure it stays that way. */
132 #undef DWARF2_UNWIND_INFO
133 #undef DWARF2_FRAME_INFO
134 #if (GCC_VERSION >= 3000)
135 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
136 #endif
137
138 /* The size of the target's pointer type. */
139 #ifndef PTR_SIZE
140 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
141 #endif
142
143 /* Array of RTXes referenced by the debugging information, which therefore
144 must be kept around forever. */
145 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
146
147 /* A pointer to the base of a list of incomplete types which might be
148 completed at some later time. incomplete_types_list needs to be a
149    vec<tree, va_gc> * because we want to tell the garbage collector about
150 it. */
151 static GTY(()) vec<tree, va_gc> *incomplete_types;
152
153 /* A pointer to the base of a table of references to declaration
154 scopes. This table is a display which tracks the nesting
155 of declaration scopes at the current scope and containing
156 scopes. This table is used to find the proper place to
157 define type declaration DIE's. */
158 static GTY(()) vec<tree, va_gc> *decl_scope_table;
159
160 /* Pointers to various DWARF2 sections. */
161 static GTY(()) section *debug_info_section;
162 static GTY(()) section *debug_skeleton_info_section;
163 static GTY(()) section *debug_abbrev_section;
164 static GTY(()) section *debug_skeleton_abbrev_section;
165 static GTY(()) section *debug_aranges_section;
166 static GTY(()) section *debug_addr_section;
167 static GTY(()) section *debug_macinfo_section;
168 static const char *debug_macinfo_section_name;
169 static unsigned macinfo_label_base = 1;
170 static GTY(()) section *debug_line_section;
171 static GTY(()) section *debug_skeleton_line_section;
172 static GTY(()) section *debug_loc_section;
173 static GTY(()) section *debug_pubnames_section;
174 static GTY(()) section *debug_pubtypes_section;
175 static GTY(()) section *debug_str_section;
176 static GTY(()) section *debug_line_str_section;
177 static GTY(()) section *debug_str_dwo_section;
178 static GTY(()) section *debug_str_offsets_section;
179 static GTY(()) section *debug_ranges_section;
180 static GTY(()) section *debug_frame_section;
181
182 /* Maximum size (in bytes) of an artificially generated label. */
183 #define MAX_ARTIFICIAL_LABEL_BYTES 40
184
185 /* According to the (draft) DWARF 3 specification, the initial length
186 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
187 bytes are 0xffffffff, followed by the length stored in the next 8
188 bytes.
189
190 However, the SGI/MIPS ABI uses an initial length which is equal to
191 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
192
193 #ifndef DWARF_INITIAL_LENGTH_SIZE
194 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
195 #endif
196
197 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
198 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
199 #endif
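/* A minimal sketch (not from this file) of how an initial length field is
   laid out, assuming a little-endian byte buffer; the real emission below
   goes through dw2_asm_output_data and the macros above.  */
#if 0
static unsigned int
sketch_write_initial_length (unsigned char *buf, unsigned long long length,
                             int offset_size /* DWARF_OFFSET_SIZE: 4 or 8 */)
{
  unsigned int pos = 0;

  if (offset_size == 8)
    {
      /* 64-bit DWARF: 0xffffffff escape followed by an 8-byte length.  */
      int i;
      for (i = 0; i < 4; i++)
        buf[pos++] = 0xff;
      for (i = 0; i < 8; i++)
        buf[pos++] = (length >> (8 * i)) & 0xff;
    }
  else
    {
      /* 32-bit DWARF: a plain 4-byte length (must stay below 0xfffffff0).  */
      int i;
      for (i = 0; i < 4; i++)
        buf[pos++] = (length >> (8 * i)) & 0xff;
    }

  return pos;  /* 12 for 64-bit DWARF, 4 for 32-bit DWARF.  */
}
#endif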
200
201 /* Round SIZE up to the nearest BOUNDARY. */
202 #define DWARF_ROUND(SIZE,BOUNDARY) \
203 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
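/* For example, DWARF_ROUND (5, 4) == 8 and DWARF_ROUND (8, 4) == 8
   (illustrative note, not from the original sources).  */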
204
205 /* CIE identifier. */
206 #if HOST_BITS_PER_WIDE_INT >= 64
207 #define DWARF_CIE_ID \
208 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
209 #else
210 #define DWARF_CIE_ID DW_CIE_ID
211 #endif
212
213
214 /* A vector for a table that contains frame description
215 information for each routine. */
216 #define NOT_INDEXED (-1U)
217 #define NO_INDEX_ASSIGNED (-2U)
218
219 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
220
221 struct GTY((for_user)) indirect_string_node {
222 const char *str;
223 unsigned int refcount;
224 enum dwarf_form form;
225 char *label;
226 unsigned int index;
227 };
228
229 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
230 {
231 typedef const char *compare_type;
232
233 static hashval_t hash (indirect_string_node *);
234 static bool equal (indirect_string_node *, const char *);
235 };
236
237 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
238
239 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
240
241 /* With split_debug_info, both the comp_dir and dwo_name go in the
242 main object file, rather than the dwo, similar to the force_direct
243 parameter elsewhere but with additional complications:
244
245 1) The string is needed in both the main object file and the dwo.
246 That is, the comp_dir and dwo_name will appear in both places.
247
248 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
249 DW_FORM_line_strp or DW_FORM_GNU_str_index.
250
251 3) GCC chooses the form to use late, depending on the size and
252 reference count.
253
254    Rather than forcing all the debug string handling functions and
255    their callers to deal with these complications, simply use a separate,
256 special-cased string table for any attribute that should go in the
257 main object file. This limits the complexity to just the places
258 that need it. */
259
260 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
261
262 static GTY(()) int dw2_string_counter;
263
264 /* True if the compilation unit places functions in more than one section. */
265 static GTY(()) bool have_multiple_function_sections = false;
266
267 /* Whether the default text and cold text sections have been used at all. */
268 static GTY(()) bool text_section_used = false;
269 static GTY(()) bool cold_text_section_used = false;
270
271 /* The default cold text section. */
272 static GTY(()) section *cold_text_section;
273
274 /* The DIE for C++14 'auto' in a function return type. */
275 static GTY(()) dw_die_ref auto_die;
276
277 /* The DIE for C++14 'decltype(auto)' in a function return type. */
278 static GTY(()) dw_die_ref decltype_auto_die;
279
280 /* Forward declarations for functions defined in this file. */
281
282 static void output_call_frame_info (int);
283 static void dwarf2out_note_section_used (void);
284
285 /* Personality decl of current unit. Used only when assembler does not support
286 personality CFI. */
287 static GTY(()) rtx current_unit_personality;
288
289 /* Whether an eh_frame section is required. */
290 static GTY(()) bool do_eh_frame = false;
291
292 /* .debug_rnglists next index. */
293 static unsigned int rnglist_idx;
294
295 /* Data and reference forms for relocatable data. */
296 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
297 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
298
299 #ifndef DEBUG_FRAME_SECTION
300 #define DEBUG_FRAME_SECTION ".debug_frame"
301 #endif
302
303 #ifndef FUNC_BEGIN_LABEL
304 #define FUNC_BEGIN_LABEL "LFB"
305 #endif
306
307 #ifndef FUNC_END_LABEL
308 #define FUNC_END_LABEL "LFE"
309 #endif
310
311 #ifndef PROLOGUE_END_LABEL
312 #define PROLOGUE_END_LABEL "LPE"
313 #endif
314
315 #ifndef EPILOGUE_BEGIN_LABEL
316 #define EPILOGUE_BEGIN_LABEL "LEB"
317 #endif
318
319 #ifndef FRAME_BEGIN_LABEL
320 #define FRAME_BEGIN_LABEL "Lframe"
321 #endif
322 #define CIE_AFTER_SIZE_LABEL "LSCIE"
323 #define CIE_END_LABEL "LECIE"
324 #define FDE_LABEL "LSFDE"
325 #define FDE_AFTER_SIZE_LABEL "LASFDE"
326 #define FDE_END_LABEL "LEFDE"
327 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
328 #define LINE_NUMBER_END_LABEL "LELT"
329 #define LN_PROLOG_AS_LABEL "LASLTP"
330 #define LN_PROLOG_END_LABEL "LELTP"
331 #define DIE_LABEL_PREFIX "DW"
332 \f
333 /* Match the base name of a file to the base name of a compilation unit. */
334
335 static int
336 matches_main_base (const char *path)
337 {
338 /* Cache the last query. */
339 static const char *last_path = NULL;
340 static int last_match = 0;
341 if (path != last_path)
342 {
343 const char *base;
344 int length = base_of_path (path, &base);
345 last_path = path;
346 last_match = (length == main_input_baselength
347 && memcmp (base, main_input_basename, length) == 0);
348 }
349 return last_match;
350 }
351
352 #ifdef DEBUG_DEBUG_STRUCT
353
354 static int
355 dump_struct_debug (tree type, enum debug_info_usage usage,
356 enum debug_struct_file criterion, int generic,
357 int matches, int result)
358 {
359 /* Find the type name. */
360 tree type_decl = TYPE_STUB_DECL (type);
361 tree t = type_decl;
362 const char *name = 0;
363 if (TREE_CODE (t) == TYPE_DECL)
364 t = DECL_NAME (t);
365 if (t)
366 name = IDENTIFIER_POINTER (t);
367
368 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
369 criterion,
370 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
371 matches ? "bas" : "hdr",
372 generic ? "gen" : "ord",
373 usage == DINFO_USAGE_DFN ? ";" :
374 usage == DINFO_USAGE_DIR_USE ? "." : "*",
375 result,
376 (void*) type_decl, name);
377 return result;
378 }
379 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
380 dump_struct_debug (type, usage, criterion, generic, matches, result)
381
382 #else
383
384 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
385 (result)
386
387 #endif
388
389 /* Get the number of HOST_WIDE_INTs needed to represent the precision
390 of the number. Some constants have a large uniform precision, so
391 we get the precision needed for the actual value of the number. */
392
393 static unsigned int
394 get_full_len (const wide_int &op)
395 {
396 int prec = wi::min_precision (op, UNSIGNED);
397 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
398 / HOST_BITS_PER_WIDE_INT);
399 }
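/* For instance, with a 64-bit HOST_WIDE_INT a value whose minimum precision
   is 64 bits fits in one HOST_WIDE_INT, while one needing 65 bits yields 2
   (illustrative note).  */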
400
401 static bool
402 should_emit_struct_debug (tree type, enum debug_info_usage usage)
403 {
404 enum debug_struct_file criterion;
405 tree type_decl;
406 bool generic = lang_hooks.types.generic_p (type);
407
408 if (generic)
409 criterion = debug_struct_generic[usage];
410 else
411 criterion = debug_struct_ordinary[usage];
412
413 if (criterion == DINFO_STRUCT_FILE_NONE)
414 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
415 if (criterion == DINFO_STRUCT_FILE_ANY)
416 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
417
418 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
419
420 if (type_decl != NULL)
421 {
422 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
423 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
424
425 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
426 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
427 }
428
429 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
430 }
431 \f
432 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
433 switch to the data section instead, and write out a synthetic start label
434 for collect2 the first time around. */
435
436 static void
437 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
438 {
439 if (eh_frame_section == 0)
440 {
441 int flags;
442
443 if (EH_TABLES_CAN_BE_READ_ONLY)
444 {
445 int fde_encoding;
446 int per_encoding;
447 int lsda_encoding;
448
449 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
450 /*global=*/0);
451 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
452 /*global=*/1);
453 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
454 /*global=*/0);
455 flags = ((! flag_pic
456 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
457 && (fde_encoding & 0x70) != DW_EH_PE_aligned
458 && (per_encoding & 0x70) != DW_EH_PE_absptr
459 && (per_encoding & 0x70) != DW_EH_PE_aligned
460 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
461 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
462 ? 0 : SECTION_WRITE);
463 }
464 else
465 flags = SECTION_WRITE;
466
467 #ifdef EH_FRAME_SECTION_NAME
468 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
469 #else
470 eh_frame_section = ((flags == SECTION_WRITE)
471 ? data_section : readonly_data_section);
472 #endif /* EH_FRAME_SECTION_NAME */
473 }
474
475 switch_to_section (eh_frame_section);
476
477 #ifdef EH_FRAME_THROUGH_COLLECT2
478 /* We have no special eh_frame section. Emit special labels to guide
479 collect2. */
480 if (!back)
481 {
482 tree label = get_file_function_name ("F");
483 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
484 targetm.asm_out.globalize_label (asm_out_file,
485 IDENTIFIER_POINTER (label));
486 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
487 }
488 #endif
489 }
490
491 /* Switch [BACK] to the eh or debug frame table section, depending on
492 FOR_EH. */
493
494 static void
495 switch_to_frame_table_section (int for_eh, bool back)
496 {
497 if (for_eh)
498 switch_to_eh_frame_section (back);
499 else
500 {
501 if (!debug_frame_section)
502 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
503 SECTION_DEBUG, NULL);
504 switch_to_section (debug_frame_section);
505 }
506 }
507
508 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
509
510 enum dw_cfi_oprnd_type
511 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
512 {
513 switch (cfi)
514 {
515 case DW_CFA_nop:
516 case DW_CFA_GNU_window_save:
517 case DW_CFA_remember_state:
518 case DW_CFA_restore_state:
519 return dw_cfi_oprnd_unused;
520
521 case DW_CFA_set_loc:
522 case DW_CFA_advance_loc1:
523 case DW_CFA_advance_loc2:
524 case DW_CFA_advance_loc4:
525 case DW_CFA_MIPS_advance_loc8:
526 return dw_cfi_oprnd_addr;
527
528 case DW_CFA_offset:
529 case DW_CFA_offset_extended:
530 case DW_CFA_def_cfa:
531 case DW_CFA_offset_extended_sf:
532 case DW_CFA_def_cfa_sf:
533 case DW_CFA_restore:
534 case DW_CFA_restore_extended:
535 case DW_CFA_undefined:
536 case DW_CFA_same_value:
537 case DW_CFA_def_cfa_register:
538 case DW_CFA_register:
539 case DW_CFA_expression:
540 case DW_CFA_val_expression:
541 return dw_cfi_oprnd_reg_num;
542
543 case DW_CFA_def_cfa_offset:
544 case DW_CFA_GNU_args_size:
545 case DW_CFA_def_cfa_offset_sf:
546 return dw_cfi_oprnd_offset;
547
548 case DW_CFA_def_cfa_expression:
549 return dw_cfi_oprnd_loc;
550
551 default:
552 gcc_unreachable ();
553 }
554 }
555
556 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
557
558 enum dw_cfi_oprnd_type
559 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
560 {
561 switch (cfi)
562 {
563 case DW_CFA_def_cfa:
564 case DW_CFA_def_cfa_sf:
565 case DW_CFA_offset:
566 case DW_CFA_offset_extended_sf:
567 case DW_CFA_offset_extended:
568 return dw_cfi_oprnd_offset;
569
570 case DW_CFA_register:
571 return dw_cfi_oprnd_reg_num;
572
573 case DW_CFA_expression:
574 case DW_CFA_val_expression:
575 return dw_cfi_oprnd_loc;
576
577 case DW_CFA_def_cfa_expression:
578 return dw_cfi_oprnd_cfa_loc;
579
580 default:
581 return dw_cfi_oprnd_unused;
582 }
583 }
584
585 /* Output one FDE. */
586
587 static void
588 output_fde (dw_fde_ref fde, bool for_eh, bool second,
589 char *section_start_label, int fde_encoding, char *augmentation,
590 bool any_lsda_needed, int lsda_encoding)
591 {
592 const char *begin, *end;
593 static unsigned int j;
594 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
595
596 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
597 /* empty */ 0);
598 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
599 for_eh + j);
600 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
601 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
602 if (!XCOFF_DEBUGGING_INFO || for_eh)
603 {
604 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
605 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
606 " indicating 64-bit DWARF extension");
607 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
608 "FDE Length");
609 }
610 ASM_OUTPUT_LABEL (asm_out_file, l1);
611
612 if (for_eh)
613 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
614 else
615 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
616 debug_frame_section, "FDE CIE offset");
617
618 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
619 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
620
621 if (for_eh)
622 {
623 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
624 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
625 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
626 "FDE initial location");
627 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
628 end, begin, "FDE address range");
629 }
630 else
631 {
632 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
633 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
634 }
635
636 if (augmentation[0])
637 {
638 if (any_lsda_needed)
639 {
640 int size = size_of_encoded_value (lsda_encoding);
641
642 if (lsda_encoding == DW_EH_PE_aligned)
643 {
644 int offset = ( 4 /* Length */
645 + 4 /* CIE offset */
646 + 2 * size_of_encoded_value (fde_encoding)
647 + 1 /* Augmentation size */ );
648 int pad = -offset & (PTR_SIZE - 1);
649
650 size += pad;
651 gcc_assert (size_of_uleb128 (size) == 1);
652 }
653
654 dw2_asm_output_data_uleb128 (size, "Augmentation size");
655
656 if (fde->uses_eh_lsda)
657 {
658 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
659 fde->funcdef_number);
660 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
661 gen_rtx_SYMBOL_REF (Pmode, l1),
662 false,
663 "Language Specific Data Area");
664 }
665 else
666 {
667 if (lsda_encoding == DW_EH_PE_aligned)
668 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
669 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
670 "Language Specific Data Area (none)");
671 }
672 }
673 else
674 dw2_asm_output_data_uleb128 (0, "Augmentation size");
675 }
676
677 /* Loop through the Call Frame Instructions associated with this FDE. */
678 fde->dw_fde_current_label = begin;
679 {
680 size_t from, until, i;
681
682 from = 0;
683 until = vec_safe_length (fde->dw_fde_cfi);
684
685 if (fde->dw_fde_second_begin == NULL)
686 ;
687 else if (!second)
688 until = fde->dw_fde_switch_cfi_index;
689 else
690 from = fde->dw_fde_switch_cfi_index;
691
692 for (i = from; i < until; i++)
693 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
694 }
695
696 /* If we are to emit a ref/link from function bodies to their frame tables,
697 do it now. This is typically performed to make sure that tables
698    associated with functions are dragged with them and not discarded by
699    link-time garbage collection.  We need to do this on a per-function basis
700    to cope with -ffunction-sections.  */
701
702 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
703 /* Switch to the function section, emit the ref to the tables, and
704 switch *back* into the table section. */
705 switch_to_section (function_section (fde->decl));
706 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
707 switch_to_frame_table_section (for_eh, true);
708 #endif
709
710 /* Pad the FDE out to an address sized boundary. */
711 ASM_OUTPUT_ALIGN (asm_out_file,
712 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
713 ASM_OUTPUT_LABEL (asm_out_file, l2);
714
715 j += 2;
716 }
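/* A minimal sketch (not part of this file) of the ULEB128 encoding used for
   the augmentation size emitted above; the byte count it produces should
   match what size_of_uleb128 reports for the same value.  */
#if 0
static unsigned int
sketch_encode_uleb128 (unsigned long long value, unsigned char *buf)
{
  unsigned int len = 0;

  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value != 0)
        byte |= 0x80;           /* More bytes follow.  */
      buf[len++] = byte;
    }
  while (value != 0);

  return len;                   /* 1 for values < 128, 2 for values < 16384, ...  */
}
#endif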
717
718 /* Return true if frame description entry FDE is needed for EH. */
719
720 static bool
721 fde_needed_for_eh_p (dw_fde_ref fde)
722 {
723 if (flag_asynchronous_unwind_tables)
724 return true;
725
726 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
727 return true;
728
729 if (fde->uses_eh_lsda)
730 return true;
731
732 /* If exceptions are enabled, we have collected nothrow info. */
733 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
734 return false;
735
736 return true;
737 }
738
739 /* Output the call frame information used to record information
740 that relates to calculating the frame pointer, and records the
741 location of saved registers. */
742
743 static void
744 output_call_frame_info (int for_eh)
745 {
746 unsigned int i;
747 dw_fde_ref fde;
748 dw_cfi_ref cfi;
749 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
750 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
751 bool any_lsda_needed = false;
752 char augmentation[6];
753 int augmentation_size;
754 int fde_encoding = DW_EH_PE_absptr;
755 int per_encoding = DW_EH_PE_absptr;
756 int lsda_encoding = DW_EH_PE_absptr;
757 int return_reg;
758 rtx personality = NULL;
759 int dw_cie_version;
760
761 /* Don't emit a CIE if there won't be any FDEs. */
762 if (!fde_vec)
763 return;
764
765 /* Nothing to do if the assembler's doing it all. */
766 if (dwarf2out_do_cfi_asm ())
767 return;
768
769 /* If we don't have any functions we'll want to unwind out of, don't emit
770 any EH unwind information. If we make FDEs linkonce, we may have to
771 emit an empty label for an FDE that wouldn't otherwise be emitted. We
772 want to avoid having an FDE kept around when the function it refers to
773 is discarded. Example where this matters: a primary function template
774 in C++ requires EH information, an explicit specialization doesn't. */
775 if (for_eh)
776 {
777 bool any_eh_needed = false;
778
779 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
780 {
781 if (fde->uses_eh_lsda)
782 any_eh_needed = any_lsda_needed = true;
783 else if (fde_needed_for_eh_p (fde))
784 any_eh_needed = true;
785 else if (TARGET_USES_WEAK_UNWIND_INFO)
786 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
787 }
788
789 if (!any_eh_needed)
790 return;
791 }
792
793 /* We're going to be generating comments, so turn on app. */
794 if (flag_debug_asm)
795 app_enable ();
796
797 /* Switch to the proper frame section, first time. */
798 switch_to_frame_table_section (for_eh, false);
799
800 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
801 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
802
803 /* Output the CIE. */
804 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
805 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
806 if (!XCOFF_DEBUGGING_INFO || for_eh)
807 {
808 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
809 dw2_asm_output_data (4, 0xffffffff,
810 "Initial length escape value indicating 64-bit DWARF extension");
811 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
812 "Length of Common Information Entry");
813 }
814 ASM_OUTPUT_LABEL (asm_out_file, l1);
815
816 /* Now that the CIE pointer is PC-relative for EH,
817 use 0 to identify the CIE. */
818 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
819 (for_eh ? 0 : DWARF_CIE_ID),
820 "CIE Identifier Tag");
821
822 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
823 use CIE version 1, unless that would produce incorrect results
824 due to overflowing the return register column. */
825 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
826 dw_cie_version = 1;
827 if (return_reg >= 256 || dwarf_version > 2)
828 dw_cie_version = 3;
829 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
830
831 augmentation[0] = 0;
832 augmentation_size = 0;
833
834 personality = current_unit_personality;
835 if (for_eh)
836 {
837 char *p;
838
839 /* Augmentation:
840 z Indicates that a uleb128 is present to size the
841 augmentation section.
842 L Indicates the encoding (and thus presence) of
843 an LSDA pointer in the FDE augmentation.
844 R Indicates a non-default pointer encoding for
845 FDE code pointers.
846 P Indicates the presence of an encoding + language
847 personality routine in the CIE augmentation. */
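         Illustrative note (not from the original comment): a unit with a
         personality routine, at least one LSDA and a non-absptr FDE encoding
         ends up with the augmentation string "zPLR" built below.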
848
849 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
850 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
851 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
852
853 p = augmentation + 1;
854 if (personality)
855 {
856 *p++ = 'P';
857 augmentation_size += 1 + size_of_encoded_value (per_encoding);
858 assemble_external_libcall (personality);
859 }
860 if (any_lsda_needed)
861 {
862 *p++ = 'L';
863 augmentation_size += 1;
864 }
865 if (fde_encoding != DW_EH_PE_absptr)
866 {
867 *p++ = 'R';
868 augmentation_size += 1;
869 }
870 if (p > augmentation + 1)
871 {
872 augmentation[0] = 'z';
873 *p = '\0';
874 }
875
876 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
877 if (personality && per_encoding == DW_EH_PE_aligned)
878 {
879 int offset = ( 4 /* Length */
880 + 4 /* CIE Id */
881 + 1 /* CIE version */
882 + strlen (augmentation) + 1 /* Augmentation */
883 + size_of_uleb128 (1) /* Code alignment */
884 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
885 + 1 /* RA column */
886 + 1 /* Augmentation size */
887 + 1 /* Personality encoding */ );
888 int pad = -offset & (PTR_SIZE - 1);
889
890 augmentation_size += pad;
891
892 /* Augmentations should be small, so there's scarce need to
893 iterate for a solution. Die if we exceed one uleb128 byte. */
894 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
895 }
896 }
897
898 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
899 if (dw_cie_version >= 4)
900 {
901 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
902 dw2_asm_output_data (1, 0, "CIE Segment Size");
903 }
904 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
905 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
906 "CIE Data Alignment Factor");
907
908 if (dw_cie_version == 1)
909 dw2_asm_output_data (1, return_reg, "CIE RA Column");
910 else
911 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
912
913 if (augmentation[0])
914 {
915 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
916 if (personality)
917 {
918 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
919 eh_data_format_name (per_encoding));
920 dw2_asm_output_encoded_addr_rtx (per_encoding,
921 personality,
922 true, NULL);
923 }
924
925 if (any_lsda_needed)
926 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
927 eh_data_format_name (lsda_encoding));
928
929 if (fde_encoding != DW_EH_PE_absptr)
930 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
931 eh_data_format_name (fde_encoding));
932 }
933
934 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
935 output_cfi (cfi, NULL, for_eh);
936
937 /* Pad the CIE out to an address sized boundary. */
938 ASM_OUTPUT_ALIGN (asm_out_file,
939 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
940 ASM_OUTPUT_LABEL (asm_out_file, l2);
941
942 /* Loop through all of the FDE's. */
943 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
944 {
945 unsigned int k;
946
947 /* Don't emit EH unwind info for leaf functions that don't need it. */
948 if (for_eh && !fde_needed_for_eh_p (fde))
949 continue;
950
951 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
952 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
953 augmentation, any_lsda_needed, lsda_encoding);
954 }
955
956 if (for_eh && targetm.terminate_dw2_eh_frame_info)
957 dw2_asm_output_data (4, 0, "End of Table");
958
959 /* Turn off app to make assembly quicker. */
960 if (flag_debug_asm)
961 app_disable ();
962 }
963
964 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
965
966 static void
967 dwarf2out_do_cfi_startproc (bool second)
968 {
969 int enc;
970 rtx ref;
971
972 fprintf (asm_out_file, "\t.cfi_startproc\n");
973
974 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
975 eh unwinders. */
976 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
977 return;
978
979 rtx personality = get_personality_function (current_function_decl);
980
981 if (personality)
982 {
983 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
984 ref = personality;
985
986 /* ??? The GAS support isn't entirely consistent. We have to
987 handle indirect support ourselves, but PC-relative is done
988 in the assembler. Further, the assembler can't handle any
989 of the weirder relocation types. */
990 if (enc & DW_EH_PE_indirect)
991 ref = dw2_force_const_mem (ref, true);
992
993 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
994 output_addr_const (asm_out_file, ref);
995 fputc ('\n', asm_out_file);
996 }
997
998 if (crtl->uses_eh_lsda)
999 {
1000 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
1001
1002 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1003 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1004 current_function_funcdef_no);
1005 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1006 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1007
1008 if (enc & DW_EH_PE_indirect)
1009 ref = dw2_force_const_mem (ref, true);
1010
1011 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1012 output_addr_const (asm_out_file, ref);
1013 fputc ('\n', asm_out_file);
1014 }
1015 }
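/* Illustrative note: for a C++ function with a landing pad on x86_64 Linux
   the directives above typically come out as something like

        .cfi_startproc
        .cfi_personality 0x9b,DW.ref.__gxx_personality_v0
        .cfi_lsda 0x1b,.LLSDA0

   where the exact encodings and symbol names are target- and
   configuration-dependent.  */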
1016
1017 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1018 this allocation may be done before pass_final. */
1019
1020 dw_fde_ref
1021 dwarf2out_alloc_current_fde (void)
1022 {
1023 dw_fde_ref fde;
1024
1025 fde = ggc_cleared_alloc<dw_fde_node> ();
1026 fde->decl = current_function_decl;
1027 fde->funcdef_number = current_function_funcdef_no;
1028 fde->fde_index = vec_safe_length (fde_vec);
1029 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1030 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1031 fde->nothrow = crtl->nothrow;
1032 fde->drap_reg = INVALID_REGNUM;
1033 fde->vdrap_reg = INVALID_REGNUM;
1034
1035 /* Record the FDE associated with this function. */
1036 cfun->fde = fde;
1037 vec_safe_push (fde_vec, fde);
1038
1039 return fde;
1040 }
1041
1042 /* Output a marker (i.e. a label) for the beginning of a function, before
1043 the prologue. */
1044
1045 void
1046 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1047 unsigned int column ATTRIBUTE_UNUSED,
1048 const char *file ATTRIBUTE_UNUSED)
1049 {
1050 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1051 char * dup_label;
1052 dw_fde_ref fde;
1053 section *fnsec;
1054 bool do_frame;
1055
1056 current_function_func_begin_label = NULL;
1057
1058 do_frame = dwarf2out_do_frame ();
1059
1060 /* ??? current_function_func_begin_label is also used by except.c for
1061 call-site information. We must emit this label if it might be used. */
1062 if (!do_frame
1063 && (!flag_exceptions
1064 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1065 return;
1066
1067 fnsec = function_section (current_function_decl);
1068 switch_to_section (fnsec);
1069 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1070 current_function_funcdef_no);
1071 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1072 current_function_funcdef_no);
1073 dup_label = xstrdup (label);
1074 current_function_func_begin_label = dup_label;
1075
1076 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1077 if (!do_frame)
1078 return;
1079
1080 /* Unlike the debug version, the EH version of frame unwind info is a per-
1081 function setting so we need to record whether we need it for the unit. */
1082 do_eh_frame |= dwarf2out_do_eh_frame ();
1083
1084 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1085 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1086 would include pass_dwarf2_frame. If we've not created the FDE yet,
1087 do so now. */
1088 fde = cfun->fde;
1089 if (fde == NULL)
1090 fde = dwarf2out_alloc_current_fde ();
1091
1092 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1093 fde->dw_fde_begin = dup_label;
1094 fde->dw_fde_current_label = dup_label;
1095 fde->in_std_section = (fnsec == text_section
1096 || (cold_text_section && fnsec == cold_text_section));
1097
1098 /* We only want to output line number information for the genuine dwarf2
1099 prologue case, not the eh frame case. */
1100 #ifdef DWARF2_DEBUGGING_INFO
1101 if (file)
1102 dwarf2out_source_line (line, column, file, 0, true);
1103 #endif
1104
1105 if (dwarf2out_do_cfi_asm ())
1106 dwarf2out_do_cfi_startproc (false);
1107 else
1108 {
1109 rtx personality = get_personality_function (current_function_decl);
1110 if (!current_unit_personality)
1111 current_unit_personality = personality;
1112
1113 /* We cannot keep a current personality per function as without CFI
1114 asm, at the point where we emit the CFI data, there is no current
1115 function anymore. */
1116 if (personality && current_unit_personality != personality)
1117 sorry ("multiple EH personalities are supported only with assemblers "
1118 "supporting .cfi_personality directive");
1119 }
1120 }
1121
1122 /* Output a marker (i.e. a label) for the end of the generated code
1123 for a function prologue. This gets called *after* the prologue code has
1124 been generated. */
1125
1126 void
1127 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1128 const char *file ATTRIBUTE_UNUSED)
1129 {
1130 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1131
1132   /* Output a label to mark the end of the prologue code generated for
1133      this function.  */
1134 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1135 current_function_funcdef_no);
1136 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1137 current_function_funcdef_no);
1138 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1139 }
1140
1141 /* Output a marker (i.e. a label) for the beginning of the generated code
1142    for a function epilogue.  This gets called *before* the epilogue code has
1143 been generated. */
1144
1145 void
1146 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1147 const char *file ATTRIBUTE_UNUSED)
1148 {
1149 dw_fde_ref fde = cfun->fde;
1150 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1151
1152 if (fde->dw_fde_vms_begin_epilogue)
1153 return;
1154
1155   /* Output a label to mark the beginning of the epilogue code generated
1156      for this function.  */
1157 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1158 current_function_funcdef_no);
1159 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1160 current_function_funcdef_no);
1161 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1162 }
1163
1164 /* Output a marker (i.e. a label) for the absolute end of the generated code
1165 for a function definition. This gets called *after* the epilogue code has
1166 been generated. */
1167
1168 void
1169 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1170 const char *file ATTRIBUTE_UNUSED)
1171 {
1172 dw_fde_ref fde;
1173 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1174
1175 last_var_location_insn = NULL;
1176 cached_next_real_insn = NULL;
1177
1178 if (dwarf2out_do_cfi_asm ())
1179 fprintf (asm_out_file, "\t.cfi_endproc\n");
1180
1181 /* Output a label to mark the endpoint of the code generated for this
1182 function. */
1183 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1184 current_function_funcdef_no);
1185 ASM_OUTPUT_LABEL (asm_out_file, label);
1186 fde = cfun->fde;
1187 gcc_assert (fde != NULL);
1188 if (fde->dw_fde_second_begin == NULL)
1189 fde->dw_fde_end = xstrdup (label);
1190 }
1191
1192 void
1193 dwarf2out_frame_finish (void)
1194 {
1195 /* Output call frame information. */
1196 if (targetm.debug_unwind_info () == UI_DWARF2)
1197 output_call_frame_info (0);
1198
1199 /* Output another copy for the unwinder. */
1200 if (do_eh_frame)
1201 output_call_frame_info (1);
1202 }
1203
1204 /* Note that the current function section is being used for code. */
1205
1206 static void
1207 dwarf2out_note_section_used (void)
1208 {
1209 section *sec = current_function_section ();
1210 if (sec == text_section)
1211 text_section_used = true;
1212 else if (sec == cold_text_section)
1213 cold_text_section_used = true;
1214 }
1215
1216 static void var_location_switch_text_section (void);
1217 static void set_cur_line_info_table (section *);
1218
1219 void
1220 dwarf2out_switch_text_section (void)
1221 {
1222 section *sect;
1223 dw_fde_ref fde = cfun->fde;
1224
1225 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1226
1227 if (!in_cold_section_p)
1228 {
1229 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1230 fde->dw_fde_second_begin = crtl->subsections.hot_section_label;
1231 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1232 }
1233 else
1234 {
1235 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1236 fde->dw_fde_second_begin = crtl->subsections.cold_section_label;
1237 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1238 }
1239 have_multiple_function_sections = true;
1240
1241 /* There is no need to mark used sections when not debugging. */
1242 if (cold_text_section != NULL)
1243 dwarf2out_note_section_used ();
1244
1245 if (dwarf2out_do_cfi_asm ())
1246 fprintf (asm_out_file, "\t.cfi_endproc\n");
1247
1248 /* Now do the real section switch. */
1249 sect = current_function_section ();
1250 switch_to_section (sect);
1251
1252 fde->second_in_std_section
1253 = (sect == text_section
1254 || (cold_text_section && sect == cold_text_section));
1255
1256 if (dwarf2out_do_cfi_asm ())
1257 dwarf2out_do_cfi_startproc (true);
1258
1259 var_location_switch_text_section ();
1260
1261 if (cold_text_section != NULL)
1262 set_cur_line_info_table (sect);
1263 }
1264 \f
1265 /* And now, the subset of the debugging information support code necessary
1266 for emitting location expressions. */
1267
1268 /* Data about a single source file. */
1269 struct GTY((for_user)) dwarf_file_data {
1270 const char * filename;
1271 int emitted_number;
1272 };
1273
1274 /* Describe an entry into the .debug_addr section. */
1275
1276 enum ate_kind {
1277 ate_kind_rtx,
1278 ate_kind_rtx_dtprel,
1279 ate_kind_label
1280 };
1281
1282 struct GTY((for_user)) addr_table_entry {
1283 enum ate_kind kind;
1284 unsigned int refcount;
1285 unsigned int index;
1286 union addr_table_entry_struct_union
1287 {
1288 rtx GTY ((tag ("0"))) rtl;
1289 char * GTY ((tag ("1"))) label;
1290 }
1291 GTY ((desc ("%1.kind"))) addr;
1292 };
1293
1294 typedef unsigned int var_loc_view;
1295
1296 /* Location lists are ranges + location descriptions for that range,
1297 so you can track variables that are in different places over
1298 their entire life. */
1299 typedef struct GTY(()) dw_loc_list_struct {
1300 dw_loc_list_ref dw_loc_next;
1301 const char *begin; /* Label and addr_entry for start of range */
1302 addr_table_entry *begin_entry;
1303 const char *end; /* Label for end of range */
1304 char *ll_symbol; /* Label for beginning of location list.
1305 Only on head of list. */
1306 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1307 const char *section; /* Section this loclist is relative to */
1308 dw_loc_descr_ref expr;
1309 var_loc_view vbegin, vend;
1310 hashval_t hash;
1311 /* True if all addresses in this and subsequent lists are known to be
1312 resolved. */
1313 bool resolved_addr;
1314 /* True if this list has been replaced by dw_loc_next. */
1315 bool replaced;
1316 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1317 section. */
1318 unsigned char emitted : 1;
1319 /* True if hash field is index rather than hash value. */
1320 unsigned char num_assigned : 1;
1321 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1322 unsigned char offset_emitted : 1;
1323 /* True if note_variable_value_in_expr has been called on it. */
1324 unsigned char noted_variable_value : 1;
1325 /* True if the range should be emitted even if begin and end
1326 are the same. */
1327 bool force;
1328 } dw_loc_list_node;
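/* For example, a variable that starts life in a register and is later
   spilled to the stack would be described by two nodes (illustrative
   labels): [.LVL0, .LVL1) -> DW_OP_reg3 and [.LVL1, .Lfunc_end) ->
   DW_OP_fbreg -24.  */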
1329
1330 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1331 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1332
1333 /* Convert a DWARF stack opcode into its string name. */
1334
1335 static const char *
1336 dwarf_stack_op_name (unsigned int op)
1337 {
1338 const char *name = get_DW_OP_name (op);
1339
1340 if (name != NULL)
1341 return name;
1342
1343 return "OP_<unknown>";
1344 }
1345
1346 /* Return TRUE iff we're to output location view lists as a separate
1347 attribute next to the location lists, as an extension compatible
1348 with DWARF 2 and above. */
1349
1350 static inline bool
1351 dwarf2out_locviews_in_attribute ()
1352 {
1353 return debug_variable_location_views == 1;
1354 }
1355
1356 /* Return TRUE iff we're to output location view lists as part of the
1357 location lists, as proposed for standardization after DWARF 5. */
1358
1359 static inline bool
1360 dwarf2out_locviews_in_loclist ()
1361 {
1362 #ifndef DW_LLE_view_pair
1363 return false;
1364 #else
1365 return debug_variable_location_views == -1;
1366 #endif
1367 }
1368
1369 /* Return a pointer to a newly allocated location description. Location
1370 descriptions are simple expression terms that can be strung
1371 together to form more complicated location (address) descriptions. */
1372
1373 static inline dw_loc_descr_ref
1374 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1375 unsigned HOST_WIDE_INT oprnd2)
1376 {
1377 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1378
1379 descr->dw_loc_opc = op;
1380 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1381 descr->dw_loc_oprnd1.val_entry = NULL;
1382 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1383 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1384 descr->dw_loc_oprnd2.val_entry = NULL;
1385 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1386
1387 return descr;
1388 }
1389
1390 /* Add a location description term to a location description expression. */
1391
1392 static inline void
1393 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1394 {
1395 dw_loc_descr_ref *d;
1396
1397 /* Find the end of the chain. */
1398 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1399 ;
1400
1401 *d = descr;
1402 }
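/* A minimal sketch (not part of this file) of how the two helpers above are
   meant to be combined: given a location expression computing an address,
   append "DW_OP_plus_uconst OFF; DW_OP_deref" to describe the value stored
   OFF bytes past that address.  */
#if 0
static dw_loc_descr_ref
sketch_deref_at_offset (dw_loc_descr_ref addr_expr, unsigned HOST_WIDE_INT off)
{
  add_loc_descr (&addr_expr, new_loc_descr (DW_OP_plus_uconst, off, 0));
  add_loc_descr (&addr_expr, new_loc_descr (DW_OP_deref, 0, 0));
  return addr_expr;
}
#endif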
1403
1404 /* Compare two location operands for exact equality. */
1405
1406 static bool
1407 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1408 {
1409 if (a->val_class != b->val_class)
1410 return false;
1411 switch (a->val_class)
1412 {
1413 case dw_val_class_none:
1414 return true;
1415 case dw_val_class_addr:
1416 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1417
1418 case dw_val_class_offset:
1419 case dw_val_class_unsigned_const:
1420 case dw_val_class_const:
1421 case dw_val_class_unsigned_const_implicit:
1422 case dw_val_class_const_implicit:
1423 case dw_val_class_range_list:
1424 /* These are all HOST_WIDE_INT, signed or unsigned. */
1425 return a->v.val_unsigned == b->v.val_unsigned;
1426
1427 case dw_val_class_loc:
1428 return a->v.val_loc == b->v.val_loc;
1429 case dw_val_class_loc_list:
1430 return a->v.val_loc_list == b->v.val_loc_list;
1431 case dw_val_class_view_list:
1432 return a->v.val_view_list == b->v.val_view_list;
1433 case dw_val_class_die_ref:
1434 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1435 case dw_val_class_fde_ref:
1436 return a->v.val_fde_index == b->v.val_fde_index;
1437 case dw_val_class_symview:
1438 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1439 case dw_val_class_lbl_id:
1440 case dw_val_class_lineptr:
1441 case dw_val_class_macptr:
1442 case dw_val_class_loclistsptr:
1443 case dw_val_class_high_pc:
1444 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1445 case dw_val_class_str:
1446 return a->v.val_str == b->v.val_str;
1447 case dw_val_class_flag:
1448 return a->v.val_flag == b->v.val_flag;
1449 case dw_val_class_file:
1450 case dw_val_class_file_implicit:
1451 return a->v.val_file == b->v.val_file;
1452 case dw_val_class_decl_ref:
1453 return a->v.val_decl_ref == b->v.val_decl_ref;
1454
1455 case dw_val_class_const_double:
1456 return (a->v.val_double.high == b->v.val_double.high
1457 && a->v.val_double.low == b->v.val_double.low);
1458
1459 case dw_val_class_wide_int:
1460 return *a->v.val_wide == *b->v.val_wide;
1461
1462 case dw_val_class_vec:
1463 {
1464 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1465 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1466
1467 return (a_len == b_len
1468 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1469 }
1470
1471 case dw_val_class_data8:
1472 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1473
1474 case dw_val_class_vms_delta:
1475 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1476              && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1477
1478 case dw_val_class_discr_value:
1479 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1480 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1481 case dw_val_class_discr_list:
1482 /* It makes no sense comparing two discriminant value lists. */
1483 return false;
1484 }
1485 gcc_unreachable ();
1486 }
1487
1488 /* Compare two location atoms for exact equality. */
1489
1490 static bool
1491 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1492 {
1493 if (a->dw_loc_opc != b->dw_loc_opc)
1494 return false;
1495
1496 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1497 address size, but since we always allocate cleared storage it
1498 should be zero for other types of locations. */
1499 if (a->dtprel != b->dtprel)
1500 return false;
1501
1502 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1503 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1504 }
1505
1506 /* Compare two complete location expressions for exact equality. */
1507
1508 bool
1509 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1510 {
1511 while (1)
1512 {
1513 if (a == b)
1514 return true;
1515 if (a == NULL || b == NULL)
1516 return false;
1517 if (!loc_descr_equal_p_1 (a, b))
1518 return false;
1519
1520 a = a->dw_loc_next;
1521 b = b->dw_loc_next;
1522 }
1523 }
1524
1525
1526 /* Add a constant POLY_OFFSET to a location expression. */
1527
1528 static void
1529 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1530 {
1531 dw_loc_descr_ref loc;
1532 HOST_WIDE_INT *p;
1533
1534 gcc_assert (*list_head != NULL);
1535
1536 if (known_eq (poly_offset, 0))
1537 return;
1538
1539 /* Find the end of the chain. */
1540 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1541 ;
1542
1543 HOST_WIDE_INT offset;
1544 if (!poly_offset.is_constant (&offset))
1545 {
1546 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1547 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1548 return;
1549 }
1550
1551 p = NULL;
1552 if (loc->dw_loc_opc == DW_OP_fbreg
1553 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1554 p = &loc->dw_loc_oprnd1.v.val_int;
1555 else if (loc->dw_loc_opc == DW_OP_bregx)
1556 p = &loc->dw_loc_oprnd2.v.val_int;
1557
1558 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1559      offset.  Don't optimize if a signed integer overflow would happen.  */
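   /* E.g. adding 8 to a trailing DW_OP_fbreg -24 just rewrites it to
      DW_OP_fbreg -16; otherwise a DW_OP_plus_uconst 8 (or a DW_OP_minus
      sequence for negative offsets) is appended (illustrative note).  */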
1560 if (p != NULL
1561 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1562 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1563 *p += offset;
1564
1565 else if (offset > 0)
1566 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1567
1568 else
1569 {
1570 loc->dw_loc_next
1571 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1572 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1573 }
1574 }
1575
1576 /* Return a pointer to a newly allocated location description for
1577 REG and OFFSET. */
1578
1579 static inline dw_loc_descr_ref
1580 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1581 {
1582 HOST_WIDE_INT const_offset;
1583 if (offset.is_constant (&const_offset))
1584 {
1585 if (reg <= 31)
1586 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1587 const_offset, 0);
1588 else
1589 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1590 }
1591 else
1592 {
1593 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1594 loc_descr_plus_const (&ret, offset);
1595 return ret;
1596 }
1597 }
1598
1599 /* Add a constant OFFSET to a location list. */
1600
1601 static void
1602 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1603 {
1604 dw_loc_list_ref d;
1605 for (d = list_head; d != NULL; d = d->dw_loc_next)
1606 loc_descr_plus_const (&d->expr, offset);
1607 }
1608
1609 #define DWARF_REF_SIZE \
1610 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1611
1612 /* The number of bits that can be encoded by largest DW_FORM_dataN.
1613 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1614 DW_FORM_data16 with 128 bits. */
1615 #define DWARF_LARGEST_DATA_FORM_BITS \
1616 (dwarf_version >= 5 ? 128 : 64)
1617
1618 /* Utility inline function for construction of ops that were GNU extension
1619 before DWARF 5. */
1620 static inline enum dwarf_location_atom
1621 dwarf_OP (enum dwarf_location_atom op)
1622 {
1623 switch (op)
1624 {
1625 case DW_OP_implicit_pointer:
1626 if (dwarf_version < 5)
1627 return DW_OP_GNU_implicit_pointer;
1628 break;
1629
1630 case DW_OP_entry_value:
1631 if (dwarf_version < 5)
1632 return DW_OP_GNU_entry_value;
1633 break;
1634
1635 case DW_OP_const_type:
1636 if (dwarf_version < 5)
1637 return DW_OP_GNU_const_type;
1638 break;
1639
1640 case DW_OP_regval_type:
1641 if (dwarf_version < 5)
1642 return DW_OP_GNU_regval_type;
1643 break;
1644
1645 case DW_OP_deref_type:
1646 if (dwarf_version < 5)
1647 return DW_OP_GNU_deref_type;
1648 break;
1649
1650 case DW_OP_convert:
1651 if (dwarf_version < 5)
1652 return DW_OP_GNU_convert;
1653 break;
1654
1655 case DW_OP_reinterpret:
1656 if (dwarf_version < 5)
1657 return DW_OP_GNU_reinterpret;
1658 break;
1659
1660 default:
1661 break;
1662 }
1663 return op;
1664 }
1665
1666 /* Similarly for attributes. */
1667 static inline enum dwarf_attribute
1668 dwarf_AT (enum dwarf_attribute at)
1669 {
1670 switch (at)
1671 {
1672 case DW_AT_call_return_pc:
1673 if (dwarf_version < 5)
1674 return DW_AT_low_pc;
1675 break;
1676
1677 case DW_AT_call_tail_call:
1678 if (dwarf_version < 5)
1679 return DW_AT_GNU_tail_call;
1680 break;
1681
1682 case DW_AT_call_origin:
1683 if (dwarf_version < 5)
1684 return DW_AT_abstract_origin;
1685 break;
1686
1687 case DW_AT_call_target:
1688 if (dwarf_version < 5)
1689 return DW_AT_GNU_call_site_target;
1690 break;
1691
1692 case DW_AT_call_target_clobbered:
1693 if (dwarf_version < 5)
1694 return DW_AT_GNU_call_site_target_clobbered;
1695 break;
1696
1697 case DW_AT_call_parameter:
1698 if (dwarf_version < 5)
1699 return DW_AT_abstract_origin;
1700 break;
1701
1702 case DW_AT_call_value:
1703 if (dwarf_version < 5)
1704 return DW_AT_GNU_call_site_value;
1705 break;
1706
1707 case DW_AT_call_data_value:
1708 if (dwarf_version < 5)
1709 return DW_AT_GNU_call_site_data_value;
1710 break;
1711
1712 case DW_AT_call_all_calls:
1713 if (dwarf_version < 5)
1714 return DW_AT_GNU_all_call_sites;
1715 break;
1716
1717 case DW_AT_call_all_tail_calls:
1718 if (dwarf_version < 5)
1719 return DW_AT_GNU_all_tail_call_sites;
1720 break;
1721
1722 case DW_AT_dwo_name:
1723 if (dwarf_version < 5)
1724 return DW_AT_GNU_dwo_name;
1725 break;
1726
1727 default:
1728 break;
1729 }
1730 return at;
1731 }
1732
1733 /* And similarly for tags. */
1734 static inline enum dwarf_tag
1735 dwarf_TAG (enum dwarf_tag tag)
1736 {
1737 switch (tag)
1738 {
1739 case DW_TAG_call_site:
1740 if (dwarf_version < 5)
1741 return DW_TAG_GNU_call_site;
1742 break;
1743
1744 case DW_TAG_call_site_parameter:
1745 if (dwarf_version < 5)
1746 return DW_TAG_GNU_call_site_parameter;
1747 break;
1748
1749 default:
1750 break;
1751 }
1752 return tag;
1753 }
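/* For example, when emitting DWARF 4 (-gdwarf-4) dwarf_TAG (DW_TAG_call_site)
   degrades to DW_TAG_GNU_call_site and dwarf_AT (DW_AT_call_value) to
   DW_AT_GNU_call_site_value, while with -gdwarf-5 both are passed through
   unchanged (illustrative note).  */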
1754
1755 static unsigned long int get_base_type_offset (dw_die_ref);
1756
1757 /* Return the size of a location descriptor. */
1758
1759 static unsigned long
1760 size_of_loc_descr (dw_loc_descr_ref loc)
1761 {
1762 unsigned long size = 1;
1763
1764 switch (loc->dw_loc_opc)
1765 {
1766 case DW_OP_addr:
1767 size += DWARF2_ADDR_SIZE;
1768 break;
1769 case DW_OP_GNU_addr_index:
1770 case DW_OP_GNU_const_index:
1771 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1772 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1773 break;
1774 case DW_OP_const1u:
1775 case DW_OP_const1s:
1776 size += 1;
1777 break;
1778 case DW_OP_const2u:
1779 case DW_OP_const2s:
1780 size += 2;
1781 break;
1782 case DW_OP_const4u:
1783 case DW_OP_const4s:
1784 size += 4;
1785 break;
1786 case DW_OP_const8u:
1787 case DW_OP_const8s:
1788 size += 8;
1789 break;
1790 case DW_OP_constu:
1791 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1792 break;
1793 case DW_OP_consts:
1794 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1795 break;
1796 case DW_OP_pick:
1797 size += 1;
1798 break;
1799 case DW_OP_plus_uconst:
1800 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1801 break;
1802 case DW_OP_skip:
1803 case DW_OP_bra:
1804 size += 2;
1805 break;
1806 case DW_OP_breg0:
1807 case DW_OP_breg1:
1808 case DW_OP_breg2:
1809 case DW_OP_breg3:
1810 case DW_OP_breg4:
1811 case DW_OP_breg5:
1812 case DW_OP_breg6:
1813 case DW_OP_breg7:
1814 case DW_OP_breg8:
1815 case DW_OP_breg9:
1816 case DW_OP_breg10:
1817 case DW_OP_breg11:
1818 case DW_OP_breg12:
1819 case DW_OP_breg13:
1820 case DW_OP_breg14:
1821 case DW_OP_breg15:
1822 case DW_OP_breg16:
1823 case DW_OP_breg17:
1824 case DW_OP_breg18:
1825 case DW_OP_breg19:
1826 case DW_OP_breg20:
1827 case DW_OP_breg21:
1828 case DW_OP_breg22:
1829 case DW_OP_breg23:
1830 case DW_OP_breg24:
1831 case DW_OP_breg25:
1832 case DW_OP_breg26:
1833 case DW_OP_breg27:
1834 case DW_OP_breg28:
1835 case DW_OP_breg29:
1836 case DW_OP_breg30:
1837 case DW_OP_breg31:
1838 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1839 break;
1840 case DW_OP_regx:
1841 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1842 break;
1843 case DW_OP_fbreg:
1844 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1845 break;
1846 case DW_OP_bregx:
1847 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1848 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1849 break;
1850 case DW_OP_piece:
1851 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1852 break;
1853 case DW_OP_bit_piece:
1854 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1855 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1856 break;
1857 case DW_OP_deref_size:
1858 case DW_OP_xderef_size:
1859 size += 1;
1860 break;
1861 case DW_OP_call2:
1862 size += 2;
1863 break;
1864 case DW_OP_call4:
1865 size += 4;
1866 break;
1867 case DW_OP_call_ref:
1868 case DW_OP_GNU_variable_value:
1869 size += DWARF_REF_SIZE;
1870 break;
1871 case DW_OP_implicit_value:
1872 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1873 + loc->dw_loc_oprnd1.v.val_unsigned;
1874 break;
1875 case DW_OP_implicit_pointer:
1876 case DW_OP_GNU_implicit_pointer:
1877 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1878 break;
1879 case DW_OP_entry_value:
1880 case DW_OP_GNU_entry_value:
1881 {
1882 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1883 size += size_of_uleb128 (op_size) + op_size;
1884 break;
1885 }
1886 case DW_OP_const_type:
1887 case DW_OP_GNU_const_type:
1888 {
1889 unsigned long o
1890 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1891 size += size_of_uleb128 (o) + 1;
1892 switch (loc->dw_loc_oprnd2.val_class)
1893 {
1894 case dw_val_class_vec:
1895 size += loc->dw_loc_oprnd2.v.val_vec.length
1896 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1897 break;
1898 case dw_val_class_const:
1899 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1900 break;
1901 case dw_val_class_const_double:
1902 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1903 break;
1904 case dw_val_class_wide_int:
1905 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1906 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1907 break;
1908 default:
1909 gcc_unreachable ();
1910 }
1911 break;
1912 }
1913 case DW_OP_regval_type:
1914 case DW_OP_GNU_regval_type:
1915 {
1916 unsigned long o
1917 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1918 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1919 + size_of_uleb128 (o);
1920 }
1921 break;
1922 case DW_OP_deref_type:
1923 case DW_OP_GNU_deref_type:
1924 {
1925 unsigned long o
1926 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1927 size += 1 + size_of_uleb128 (o);
1928 }
1929 break;
1930 case DW_OP_convert:
1931 case DW_OP_reinterpret:
1932 case DW_OP_GNU_convert:
1933 case DW_OP_GNU_reinterpret:
1934 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1935 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1936 else
1937 {
1938 unsigned long o
1939 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1940 size += size_of_uleb128 (o);
1941 }
1942 break;
1943 case DW_OP_GNU_parameter_ref:
1944 size += 4;
1945 break;
1946 default:
1947 break;
1948 }
1949
1950 return size;
1951 }
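/* Worked example (illustrative only): LEB128 operands use 7 payload bits per
   byte, so size_of_uleb128 (100) == 1 while size_of_uleb128 (300) == 2.  A
   DW_OP_plus_uconst descriptor with operand 300 therefore occupies 1 opcode
   byte + 2 operand bytes = 3 bytes, which is what the DW_OP_plus_uconst case
   above computes.  */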
1952
1953 /* Return the size of a series of location descriptors. */
1954
1955 unsigned long
1956 size_of_locs (dw_loc_descr_ref loc)
1957 {
1958 dw_loc_descr_ref l;
1959 unsigned long size;
1960
1961 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
1962 field, to avoid writing to a PCH file. */
1963 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1964 {
1965 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
1966 break;
1967 size += size_of_loc_descr (l);
1968 }
1969 if (! l)
1970 return size;
1971
1972 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1973 {
1974 l->dw_loc_addr = size;
1975 size += size_of_loc_descr (l);
1976 }
1977
1978 return size;
1979 }
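/* Note (illustrative): the second pass above caches each descriptor's byte
   offset in dw_loc_addr so that DW_OP_skip/DW_OP_bra operands can later be
   emitted as self-relative deltas.  For instance, a DW_OP_bra whose own
   descriptor starts at offset 10 and whose target starts at offset 20 is
   emitted with operand 20 - (10 + 3) = 7, i.e. relative to the end of the
   3-byte branch descriptor (see output_loc_operands below).  */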
1980
1981 /* Return the size of the value in a DW_AT_discr_value attribute. */
1982
1983 static int
1984 size_of_discr_value (dw_discr_value *discr_value)
1985 {
1986 if (discr_value->pos)
1987 return size_of_uleb128 (discr_value->v.uval);
1988 else
1989 return size_of_sleb128 (discr_value->v.sval);
1990 }
1991
1992 /* Return the size of the value in a DW_AT_discr_list attribute. */
1993
1994 static int
1995 size_of_discr_list (dw_discr_list_ref discr_list)
1996 {
1997 int size = 0;
1998
1999 for (dw_discr_list_ref list = discr_list;
2000 list != NULL;
2001 list = list->dw_discr_next)
2002 {
2003 /* One byte for the discriminant value descriptor, and then one or two
2004 LEB128 numbers, depending on whether it's a single case label or a
2005 range label. */
2006 size += 1;
2007 size += size_of_discr_value (&list->dw_discr_lower_bound);
2008 if (list->dw_discr_range != 0)
2009 size += size_of_discr_value (&list->dw_discr_upper_bound);
2010 }
2011 return size;
2012 }
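/* Worked example (illustrative): a discriminant list with one single-value
   label (value 5) and one range label (10 .. 20), all values small enough to
   fit in one LEB128 byte, takes (1 + 1) + (1 + 1 + 1) = 5 bytes: each entry
   pays one descriptor byte, plus one LEB128 for a single value or two
   LEB128s for a range.  */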
2013
2014 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2015 static void get_ref_die_offset_label (char *, dw_die_ref);
2016 static unsigned long int get_ref_die_offset (dw_die_ref);
2017
2018 /* Output location description stack opcode's operands (if any).
2019 The for_eh_or_skip parameter controls whether register numbers are
2020 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2021 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2022 info). This should be suppressed for the cases that have not been converted
2023 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2024
2025 static void
2026 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2027 {
2028 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2029 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2030
2031 switch (loc->dw_loc_opc)
2032 {
2033 #ifdef DWARF2_DEBUGGING_INFO
2034 case DW_OP_const2u:
2035 case DW_OP_const2s:
2036 dw2_asm_output_data (2, val1->v.val_int, NULL);
2037 break;
2038 case DW_OP_const4u:
2039 if (loc->dtprel)
2040 {
2041 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2042 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2043 val1->v.val_addr);
2044 fputc ('\n', asm_out_file);
2045 break;
2046 }
2047 /* FALLTHRU */
2048 case DW_OP_const4s:
2049 dw2_asm_output_data (4, val1->v.val_int, NULL);
2050 break;
2051 case DW_OP_const8u:
2052 if (loc->dtprel)
2053 {
2054 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2055 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2056 val1->v.val_addr);
2057 fputc ('\n', asm_out_file);
2058 break;
2059 }
2060 /* FALLTHRU */
2061 case DW_OP_const8s:
2062 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2063 dw2_asm_output_data (8, val1->v.val_int, NULL);
2064 break;
2065 case DW_OP_skip:
2066 case DW_OP_bra:
2067 {
2068 int offset;
2069
2070 gcc_assert (val1->val_class == dw_val_class_loc);
2071 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2072
2073 dw2_asm_output_data (2, offset, NULL);
2074 }
2075 break;
2076 case DW_OP_implicit_value:
2077 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2078 switch (val2->val_class)
2079 {
2080 case dw_val_class_const:
2081 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2082 break;
2083 case dw_val_class_vec:
2084 {
2085 unsigned int elt_size = val2->v.val_vec.elt_size;
2086 unsigned int len = val2->v.val_vec.length;
2087 unsigned int i;
2088 unsigned char *p;
2089
2090 if (elt_size > sizeof (HOST_WIDE_INT))
2091 {
2092 elt_size /= 2;
2093 len *= 2;
2094 }
2095 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2096 i < len;
2097 i++, p += elt_size)
2098 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2099 "fp or vector constant word %u", i);
2100 }
2101 break;
2102 case dw_val_class_const_double:
2103 {
2104 unsigned HOST_WIDE_INT first, second;
2105
2106 if (WORDS_BIG_ENDIAN)
2107 {
2108 first = val2->v.val_double.high;
2109 second = val2->v.val_double.low;
2110 }
2111 else
2112 {
2113 first = val2->v.val_double.low;
2114 second = val2->v.val_double.high;
2115 }
2116 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2117 first, NULL);
2118 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2119 second, NULL);
2120 }
2121 break;
2122 case dw_val_class_wide_int:
2123 {
2124 int i;
2125 int len = get_full_len (*val2->v.val_wide);
2126 if (WORDS_BIG_ENDIAN)
2127 for (i = len - 1; i >= 0; --i)
2128 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2129 val2->v.val_wide->elt (i), NULL);
2130 else
2131 for (i = 0; i < len; ++i)
2132 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2133 val2->v.val_wide->elt (i), NULL);
2134 }
2135 break;
2136 case dw_val_class_addr:
2137 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2138 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2139 break;
2140 default:
2141 gcc_unreachable ();
2142 }
2143 break;
2144 #else
2145 case DW_OP_const2u:
2146 case DW_OP_const2s:
2147 case DW_OP_const4u:
2148 case DW_OP_const4s:
2149 case DW_OP_const8u:
2150 case DW_OP_const8s:
2151 case DW_OP_skip:
2152 case DW_OP_bra:
2153 case DW_OP_implicit_value:
2154 /* We currently don't make any attempt to make sure these are
2155 aligned properly like we do for the main unwind info, so
2156 don't support emitting things larger than a byte if we're
2157 only doing unwinding. */
2158 gcc_unreachable ();
2159 #endif
2160 case DW_OP_const1u:
2161 case DW_OP_const1s:
2162 dw2_asm_output_data (1, val1->v.val_int, NULL);
2163 break;
2164 case DW_OP_constu:
2165 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2166 break;
2167 case DW_OP_consts:
2168 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2169 break;
2170 case DW_OP_pick:
2171 dw2_asm_output_data (1, val1->v.val_int, NULL);
2172 break;
2173 case DW_OP_plus_uconst:
2174 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2175 break;
2176 case DW_OP_breg0:
2177 case DW_OP_breg1:
2178 case DW_OP_breg2:
2179 case DW_OP_breg3:
2180 case DW_OP_breg4:
2181 case DW_OP_breg5:
2182 case DW_OP_breg6:
2183 case DW_OP_breg7:
2184 case DW_OP_breg8:
2185 case DW_OP_breg9:
2186 case DW_OP_breg10:
2187 case DW_OP_breg11:
2188 case DW_OP_breg12:
2189 case DW_OP_breg13:
2190 case DW_OP_breg14:
2191 case DW_OP_breg15:
2192 case DW_OP_breg16:
2193 case DW_OP_breg17:
2194 case DW_OP_breg18:
2195 case DW_OP_breg19:
2196 case DW_OP_breg20:
2197 case DW_OP_breg21:
2198 case DW_OP_breg22:
2199 case DW_OP_breg23:
2200 case DW_OP_breg24:
2201 case DW_OP_breg25:
2202 case DW_OP_breg26:
2203 case DW_OP_breg27:
2204 case DW_OP_breg28:
2205 case DW_OP_breg29:
2206 case DW_OP_breg30:
2207 case DW_OP_breg31:
2208 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2209 break;
2210 case DW_OP_regx:
2211 {
2212 unsigned r = val1->v.val_unsigned;
2213 if (for_eh_or_skip >= 0)
2214 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2215 gcc_assert (size_of_uleb128 (r)
2216 == size_of_uleb128 (val1->v.val_unsigned));
2217 dw2_asm_output_data_uleb128 (r, NULL);
2218 }
2219 break;
2220 case DW_OP_fbreg:
2221 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2222 break;
2223 case DW_OP_bregx:
2224 {
2225 unsigned r = val1->v.val_unsigned;
2226 if (for_eh_or_skip >= 0)
2227 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2228 gcc_assert (size_of_uleb128 (r)
2229 == size_of_uleb128 (val1->v.val_unsigned));
2230 dw2_asm_output_data_uleb128 (r, NULL);
2231 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2232 }
2233 break;
2234 case DW_OP_piece:
2235 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2236 break;
2237 case DW_OP_bit_piece:
2238 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2239 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2240 break;
2241 case DW_OP_deref_size:
2242 case DW_OP_xderef_size:
2243 dw2_asm_output_data (1, val1->v.val_int, NULL);
2244 break;
2245
2246 case DW_OP_addr:
2247 if (loc->dtprel)
2248 {
2249 if (targetm.asm_out.output_dwarf_dtprel)
2250 {
2251 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2252 DWARF2_ADDR_SIZE,
2253 val1->v.val_addr);
2254 fputc ('\n', asm_out_file);
2255 }
2256 else
2257 gcc_unreachable ();
2258 }
2259 else
2260 {
2261 #ifdef DWARF2_DEBUGGING_INFO
2262 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2263 #else
2264 gcc_unreachable ();
2265 #endif
2266 }
2267 break;
2268
2269 case DW_OP_GNU_addr_index:
2270 case DW_OP_GNU_const_index:
2271 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2272 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2273 "(index into .debug_addr)");
2274 break;
2275
2276 case DW_OP_call2:
2277 case DW_OP_call4:
2278 {
2279 unsigned long die_offset
2280 = get_ref_die_offset (val1->v.val_die_ref.die);
2281 /* Make sure the offset has been computed and that we can encode it as
2282 an operand. */
2283 gcc_assert (die_offset > 0
2284 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2285 ? 0xffff
2286 : 0xffffffff));
2287 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2288 die_offset, NULL);
2289 }
2290 break;
2291
2292 case DW_OP_call_ref:
2293 case DW_OP_GNU_variable_value:
2294 {
2295 char label[MAX_ARTIFICIAL_LABEL_BYTES
2296 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2297 gcc_assert (val1->val_class == dw_val_class_die_ref);
2298 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2299 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2300 }
2301 break;
2302
2303 case DW_OP_implicit_pointer:
2304 case DW_OP_GNU_implicit_pointer:
2305 {
2306 char label[MAX_ARTIFICIAL_LABEL_BYTES
2307 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2308 gcc_assert (val1->val_class == dw_val_class_die_ref);
2309 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2310 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2311 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2312 }
2313 break;
2314
2315 case DW_OP_entry_value:
2316 case DW_OP_GNU_entry_value:
2317 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2318 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2319 break;
2320
2321 case DW_OP_const_type:
2322 case DW_OP_GNU_const_type:
2323 {
2324 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2325 gcc_assert (o);
2326 dw2_asm_output_data_uleb128 (o, NULL);
2327 switch (val2->val_class)
2328 {
2329 case dw_val_class_const:
2330 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2331 dw2_asm_output_data (1, l, NULL);
2332 dw2_asm_output_data (l, val2->v.val_int, NULL);
2333 break;
2334 case dw_val_class_vec:
2335 {
2336 unsigned int elt_size = val2->v.val_vec.elt_size;
2337 unsigned int len = val2->v.val_vec.length;
2338 unsigned int i;
2339 unsigned char *p;
2340
2341 l = len * elt_size;
2342 dw2_asm_output_data (1, l, NULL);
2343 if (elt_size > sizeof (HOST_WIDE_INT))
2344 {
2345 elt_size /= 2;
2346 len *= 2;
2347 }
2348 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2349 i < len;
2350 i++, p += elt_size)
2351 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2352 "fp or vector constant word %u", i);
2353 }
2354 break;
2355 case dw_val_class_const_double:
2356 {
2357 unsigned HOST_WIDE_INT first, second;
2358 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2359
2360 dw2_asm_output_data (1, 2 * l, NULL);
2361 if (WORDS_BIG_ENDIAN)
2362 {
2363 first = val2->v.val_double.high;
2364 second = val2->v.val_double.low;
2365 }
2366 else
2367 {
2368 first = val2->v.val_double.low;
2369 second = val2->v.val_double.high;
2370 }
2371 dw2_asm_output_data (l, first, NULL);
2372 dw2_asm_output_data (l, second, NULL);
2373 }
2374 break;
2375 case dw_val_class_wide_int:
2376 {
2377 int i;
2378 int len = get_full_len (*val2->v.val_wide);
2379 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2380
2381 dw2_asm_output_data (1, len * l, NULL);
2382 if (WORDS_BIG_ENDIAN)
2383 for (i = len - 1; i >= 0; --i)
2384 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2385 else
2386 for (i = 0; i < len; ++i)
2387 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2388 }
2389 break;
2390 default:
2391 gcc_unreachable ();
2392 }
2393 }
2394 break;
2395 case DW_OP_regval_type:
2396 case DW_OP_GNU_regval_type:
2397 {
2398 unsigned r = val1->v.val_unsigned;
2399 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2400 gcc_assert (o);
2401 if (for_eh_or_skip >= 0)
2402 {
2403 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2404 gcc_assert (size_of_uleb128 (r)
2405 == size_of_uleb128 (val1->v.val_unsigned));
2406 }
2407 dw2_asm_output_data_uleb128 (r, NULL);
2408 dw2_asm_output_data_uleb128 (o, NULL);
2409 }
2410 break;
2411 case DW_OP_deref_type:
2412 case DW_OP_GNU_deref_type:
2413 {
2414 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2415 gcc_assert (o);
2416 dw2_asm_output_data (1, val1->v.val_int, NULL);
2417 dw2_asm_output_data_uleb128 (o, NULL);
2418 }
2419 break;
2420 case DW_OP_convert:
2421 case DW_OP_reinterpret:
2422 case DW_OP_GNU_convert:
2423 case DW_OP_GNU_reinterpret:
2424 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2425 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2426 else
2427 {
2428 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2429 gcc_assert (o);
2430 dw2_asm_output_data_uleb128 (o, NULL);
2431 }
2432 break;
2433
2434 case DW_OP_GNU_parameter_ref:
2435 {
2436 unsigned long o;
2437 gcc_assert (val1->val_class == dw_val_class_die_ref);
2438 o = get_ref_die_offset (val1->v.val_die_ref.die);
2439 dw2_asm_output_data (4, o, NULL);
2440 }
2441 break;
2442
2443 default:
2444 /* Other codes have no operands. */
2445 break;
2446 }
2447 }
2448
2449 /* Output a sequence of location operations.
2450 The for_eh_or_skip parameter controls whether register numbers are
2451 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2452 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2453 info). This should be suppressed for the cases that have not been converted
2454 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2455
2456 void
2457 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2458 {
2459 for (; loc != NULL; loc = loc->dw_loc_next)
2460 {
2461 enum dwarf_location_atom opc = loc->dw_loc_opc;
2462 /* Output the opcode. */
2463 if (for_eh_or_skip >= 0
2464 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2465 {
2466 unsigned r = (opc - DW_OP_breg0);
2467 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2468 gcc_assert (r <= 31);
2469 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2470 }
2471 else if (for_eh_or_skip >= 0
2472 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2473 {
2474 unsigned r = (opc - DW_OP_reg0);
2475 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2476 gcc_assert (r <= 31);
2477 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2478 }
2479
2480 dw2_asm_output_data (1, opc,
2481 "%s", dwarf_stack_op_name (opc));
2482
2483 /* Output the operand(s) (if any). */
2484 output_loc_operands (loc, for_eh_or_skip);
2485 }
2486 }
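/* Example of the resulting encoding (illustrative): a variable living at
   frame-base offset -8 is described by the single descriptor DW_OP_fbreg
   with operand -8, which this routine emits as the two bytes 0x91 0x78
   (0x91 being DW_OP_fbreg and 0x78 the one-byte SLEB128 encoding of -8).  */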
2487
2488 /* Output location description stack opcode's operands (if any).
2489 The output is single bytes on a line, suitable for .cfi_escape. */
2490
2491 static void
2492 output_loc_operands_raw (dw_loc_descr_ref loc)
2493 {
2494 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2495 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2496
2497 switch (loc->dw_loc_opc)
2498 {
2499 case DW_OP_addr:
2500 case DW_OP_GNU_addr_index:
2501 case DW_OP_GNU_const_index:
2502 case DW_OP_implicit_value:
2503 /* We cannot output addresses in .cfi_escape, only bytes. */
2504 gcc_unreachable ();
2505
2506 case DW_OP_const1u:
2507 case DW_OP_const1s:
2508 case DW_OP_pick:
2509 case DW_OP_deref_size:
2510 case DW_OP_xderef_size:
2511 fputc (',', asm_out_file);
2512 dw2_asm_output_data_raw (1, val1->v.val_int);
2513 break;
2514
2515 case DW_OP_const2u:
2516 case DW_OP_const2s:
2517 fputc (',', asm_out_file);
2518 dw2_asm_output_data_raw (2, val1->v.val_int);
2519 break;
2520
2521 case DW_OP_const4u:
2522 case DW_OP_const4s:
2523 fputc (',', asm_out_file);
2524 dw2_asm_output_data_raw (4, val1->v.val_int);
2525 break;
2526
2527 case DW_OP_const8u:
2528 case DW_OP_const8s:
2529 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2530 fputc (',', asm_out_file);
2531 dw2_asm_output_data_raw (8, val1->v.val_int);
2532 break;
2533
2534 case DW_OP_skip:
2535 case DW_OP_bra:
2536 {
2537 int offset;
2538
2539 gcc_assert (val1->val_class == dw_val_class_loc);
2540 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2541
2542 fputc (',', asm_out_file);
2543 dw2_asm_output_data_raw (2, offset);
2544 }
2545 break;
2546
2547 case DW_OP_regx:
2548 {
2549 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2550 gcc_assert (size_of_uleb128 (r)
2551 == size_of_uleb128 (val1->v.val_unsigned));
2552 fputc (',', asm_out_file);
2553 dw2_asm_output_data_uleb128_raw (r);
2554 }
2555 break;
2556
2557 case DW_OP_constu:
2558 case DW_OP_plus_uconst:
2559 case DW_OP_piece:
2560 fputc (',', asm_out_file);
2561 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2562 break;
2563
2564 case DW_OP_bit_piece:
2565 fputc (',', asm_out_file);
2566 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2567 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2568 break;
2569
2570 case DW_OP_consts:
2571 case DW_OP_breg0:
2572 case DW_OP_breg1:
2573 case DW_OP_breg2:
2574 case DW_OP_breg3:
2575 case DW_OP_breg4:
2576 case DW_OP_breg5:
2577 case DW_OP_breg6:
2578 case DW_OP_breg7:
2579 case DW_OP_breg8:
2580 case DW_OP_breg9:
2581 case DW_OP_breg10:
2582 case DW_OP_breg11:
2583 case DW_OP_breg12:
2584 case DW_OP_breg13:
2585 case DW_OP_breg14:
2586 case DW_OP_breg15:
2587 case DW_OP_breg16:
2588 case DW_OP_breg17:
2589 case DW_OP_breg18:
2590 case DW_OP_breg19:
2591 case DW_OP_breg20:
2592 case DW_OP_breg21:
2593 case DW_OP_breg22:
2594 case DW_OP_breg23:
2595 case DW_OP_breg24:
2596 case DW_OP_breg25:
2597 case DW_OP_breg26:
2598 case DW_OP_breg27:
2599 case DW_OP_breg28:
2600 case DW_OP_breg29:
2601 case DW_OP_breg30:
2602 case DW_OP_breg31:
2603 case DW_OP_fbreg:
2604 fputc (',', asm_out_file);
2605 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2606 break;
2607
2608 case DW_OP_bregx:
2609 {
2610 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2611 gcc_assert (size_of_uleb128 (r)
2612 == size_of_uleb128 (val1->v.val_unsigned));
2613 fputc (',', asm_out_file);
2614 dw2_asm_output_data_uleb128_raw (r);
2615 fputc (',', asm_out_file);
2616 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2617 }
2618 break;
2619
2620 case DW_OP_implicit_pointer:
2621 case DW_OP_entry_value:
2622 case DW_OP_const_type:
2623 case DW_OP_regval_type:
2624 case DW_OP_deref_type:
2625 case DW_OP_convert:
2626 case DW_OP_reinterpret:
2627 case DW_OP_GNU_implicit_pointer:
2628 case DW_OP_GNU_entry_value:
2629 case DW_OP_GNU_const_type:
2630 case DW_OP_GNU_regval_type:
2631 case DW_OP_GNU_deref_type:
2632 case DW_OP_GNU_convert:
2633 case DW_OP_GNU_reinterpret:
2634 case DW_OP_GNU_parameter_ref:
2635 gcc_unreachable ();
2636 break;
2637
2638 default:
2639 /* Other codes have no operands. */
2640 break;
2641 }
2642 }
2643
2644 void
2645 output_loc_sequence_raw (dw_loc_descr_ref loc)
2646 {
2647 while (1)
2648 {
2649 enum dwarf_location_atom opc = loc->dw_loc_opc;
2650 /* Output the opcode. */
2651 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2652 {
2653 unsigned r = (opc - DW_OP_breg0);
2654 r = DWARF2_FRAME_REG_OUT (r, 1);
2655 gcc_assert (r <= 31);
2656 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2657 }
2658 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2659 {
2660 unsigned r = (opc - DW_OP_reg0);
2661 r = DWARF2_FRAME_REG_OUT (r, 1);
2662 gcc_assert (r <= 31);
2663 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2664 }
2665 /* Output the opcode. */
2666 fprintf (asm_out_file, "%#x", opc);
2667 output_loc_operands_raw (loc);
2668
2669 if (!loc->dw_loc_next)
2670 break;
2671 loc = loc->dw_loc_next;
2672
2673 fputc (',', asm_out_file);
2674 }
2675 }
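/* Example of the raw form (illustrative): for a DW_OP_bregx descriptor with
   register 32 and offset 16, the routines above print the opcode in hex
   followed by the comma-separated operand bytes, yielding something like
   "0x92,0x20,0x10" (0x92 is DW_OP_bregx, 0x20 the ULEB128 of 32, 0x10 the
   SLEB128 of 16), which a caller can embed in a .cfi_escape directive.  */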
2676
2677 /* This function builds a dwarf location descriptor sequence from a
2678 dw_cfa_location, adding the given OFFSET to the result of the
2679 expression. */
2680
2681 struct dw_loc_descr_node *
2682 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2683 {
2684 struct dw_loc_descr_node *head, *tmp;
2685
2686 offset += cfa->offset;
2687
2688 if (cfa->indirect)
2689 {
2690 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2691 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2692 head->dw_loc_oprnd1.val_entry = NULL;
2693 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2694 add_loc_descr (&head, tmp);
2695 loc_descr_plus_const (&head, offset);
2696 }
2697 else
2698 head = new_reg_loc_descr (cfa->reg, offset);
2699
2700 return head;
2701 }
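/* Example (illustrative): with a non-indirect CFA of "stack pointer + 16"
   and OFFSET 0, this returns the single descriptor DW_OP_breg<SP> 16; on
   x86-64, where the stack pointer is DWARF register 7, that is
   DW_OP_breg7 16.  An indirect CFA additionally dereferences the computed
   address via DW_OP_deref before the constant offset is added.  */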
2702
2703 /* This function builds a dwarf location descriptor sequence for
2704 the address at OFFSET from the CFA when the stack is aligned to
2705 ALIGNMENT bytes. */
2706
2707 struct dw_loc_descr_node *
2708 build_cfa_aligned_loc (dw_cfa_location *cfa,
2709 poly_int64 offset, HOST_WIDE_INT alignment)
2710 {
2711 struct dw_loc_descr_node *head;
2712 unsigned int dwarf_fp
2713 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2714
2715 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2716 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2717 {
2718 head = new_reg_loc_descr (dwarf_fp, 0);
2719 add_loc_descr (&head, int_loc_descriptor (alignment));
2720 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2721 loc_descr_plus_const (&head, offset);
2722 }
2723 else
2724 head = new_reg_loc_descr (dwarf_fp, offset);
2725 return head;
2726 }
2727 \f
2728 /* And now, the support for symbolic debugging information. */
2729
2730 /* .debug_str support. */
2731
2732 static void dwarf2out_init (const char *);
2733 static void dwarf2out_finish (const char *);
2734 static void dwarf2out_early_finish (const char *);
2735 static void dwarf2out_assembly_start (void);
2736 static void dwarf2out_define (unsigned int, const char *);
2737 static void dwarf2out_undef (unsigned int, const char *);
2738 static void dwarf2out_start_source_file (unsigned, const char *);
2739 static void dwarf2out_end_source_file (unsigned);
2740 static void dwarf2out_function_decl (tree);
2741 static void dwarf2out_begin_block (unsigned, unsigned);
2742 static void dwarf2out_end_block (unsigned, unsigned);
2743 static bool dwarf2out_ignore_block (const_tree);
2744 static void dwarf2out_early_global_decl (tree);
2745 static void dwarf2out_late_global_decl (tree);
2746 static void dwarf2out_type_decl (tree, int);
2747 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2748 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2749 dw_die_ref);
2750 static void dwarf2out_abstract_function (tree);
2751 static void dwarf2out_var_location (rtx_insn *);
2752 static void dwarf2out_inline_entry (tree);
2753 static void dwarf2out_size_function (tree);
2754 static void dwarf2out_begin_function (tree);
2755 static void dwarf2out_end_function (unsigned int);
2756 static void dwarf2out_register_main_translation_unit (tree unit);
2757 static void dwarf2out_set_name (tree, tree);
2758 static void dwarf2out_register_external_die (tree decl, const char *sym,
2759 unsigned HOST_WIDE_INT off);
2760 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2761 unsigned HOST_WIDE_INT *off);
2762
2763 /* The debug hooks structure. */
2764
2765 const struct gcc_debug_hooks dwarf2_debug_hooks =
2766 {
2767 dwarf2out_init,
2768 dwarf2out_finish,
2769 dwarf2out_early_finish,
2770 dwarf2out_assembly_start,
2771 dwarf2out_define,
2772 dwarf2out_undef,
2773 dwarf2out_start_source_file,
2774 dwarf2out_end_source_file,
2775 dwarf2out_begin_block,
2776 dwarf2out_end_block,
2777 dwarf2out_ignore_block,
2778 dwarf2out_source_line,
2779 dwarf2out_begin_prologue,
2780 #if VMS_DEBUGGING_INFO
2781 dwarf2out_vms_end_prologue,
2782 dwarf2out_vms_begin_epilogue,
2783 #else
2784 debug_nothing_int_charstar,
2785 debug_nothing_int_charstar,
2786 #endif
2787 dwarf2out_end_epilogue,
2788 dwarf2out_begin_function,
2789 dwarf2out_end_function, /* end_function */
2790 dwarf2out_register_main_translation_unit,
2791 dwarf2out_function_decl, /* function_decl */
2792 dwarf2out_early_global_decl,
2793 dwarf2out_late_global_decl,
2794 dwarf2out_type_decl, /* type_decl */
2795 dwarf2out_imported_module_or_decl,
2796 dwarf2out_die_ref_for_decl,
2797 dwarf2out_register_external_die,
2798 debug_nothing_tree, /* deferred_inline_function */
2799 /* The DWARF 2 backend tries to reduce debugging bloat by not
2800 emitting the abstract description of inline functions until
2801 something tries to reference them. */
2802 dwarf2out_abstract_function, /* outlining_inline_function */
2803 debug_nothing_rtx_code_label, /* label */
2804 debug_nothing_int, /* handle_pch */
2805 dwarf2out_var_location,
2806 dwarf2out_inline_entry, /* inline_entry */
2807 dwarf2out_size_function, /* size_function */
2808 dwarf2out_switch_text_section,
2809 dwarf2out_set_name,
2810 1, /* start_end_main_source_file */
2811 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2812 };
2813
2814 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2815 {
2816 dwarf2out_init,
2817 debug_nothing_charstar,
2818 debug_nothing_charstar,
2819 dwarf2out_assembly_start,
2820 debug_nothing_int_charstar,
2821 debug_nothing_int_charstar,
2822 debug_nothing_int_charstar,
2823 debug_nothing_int,
2824 debug_nothing_int_int, /* begin_block */
2825 debug_nothing_int_int, /* end_block */
2826 debug_true_const_tree, /* ignore_block */
2827 dwarf2out_source_line, /* source_line */
2828 debug_nothing_int_int_charstar, /* begin_prologue */
2829 debug_nothing_int_charstar, /* end_prologue */
2830 debug_nothing_int_charstar, /* begin_epilogue */
2831 debug_nothing_int_charstar, /* end_epilogue */
2832 debug_nothing_tree, /* begin_function */
2833 debug_nothing_int, /* end_function */
2834 debug_nothing_tree, /* register_main_translation_unit */
2835 debug_nothing_tree, /* function_decl */
2836 debug_nothing_tree, /* early_global_decl */
2837 debug_nothing_tree, /* late_global_decl */
2838 debug_nothing_tree_int, /* type_decl */
2839 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2840 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2841 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2842 debug_nothing_tree, /* deferred_inline_function */
2843 debug_nothing_tree, /* outlining_inline_function */
2844 debug_nothing_rtx_code_label, /* label */
2845 debug_nothing_int, /* handle_pch */
2846 debug_nothing_rtx_insn, /* var_location */
2847 debug_nothing_tree, /* inline_entry */
2848 debug_nothing_tree, /* size_function */
2849 debug_nothing_void, /* switch_text_section */
2850 debug_nothing_tree_tree, /* set_name */
2851 0, /* start_end_main_source_file */
2852 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2853 };
2854 \f
2855 /* NOTE: In the comments in this file, many references are made to
2856 "Debugging Information Entries". This term is abbreviated as `DIE'
2857 throughout the remainder of this file. */
2858
2859 /* An internal representation of the DWARF output is built, and then
2860 walked to generate the DWARF debugging info. The walk of the internal
2861 representation is done after the entire program has been compiled.
2862 The types below are used to describe the internal representation. */
2863
2864 /* Whether to put type DIEs into their own section .debug_types instead
2865 of making them part of the .debug_info section. Only done for
2866 Dwarf V4 or higher, and only when the user hasn't disabled it through
2867 -fno-debug-types-section. It is more efficient to put them in
2868 separate comdat sections since the linker will then be able to
2869 remove duplicates. But not all tools support .debug_types sections
2870 yet. For Dwarf V5 or higher .debug_types no longer exists; such
2871 units are emitted as DW_UT_type units within the .debug_info section. */
2872
2873 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2874
2875 /* Various DIE's use offsets relative to the beginning of the
2876 .debug_info section to refer to each other. */
2877
2878 typedef long int dw_offset;
2879
2880 struct comdat_type_node;
2881
2882 /* The entries in the line_info table more-or-less mirror the opcodes
2883 that are used in the real dwarf line table. Arrays of these entries
2884 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2885 supported. */
2886
2887 enum dw_line_info_opcode {
2888 /* Emit DW_LNE_set_address; the operand is the label index. */
2889 LI_set_address,
2890
2891 /* Emit a row to the matrix with the given line. This may be done
2892 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2893 special opcodes. */
2894 LI_set_line,
2895
2896 /* Emit a DW_LNS_set_file. */
2897 LI_set_file,
2898
2899 /* Emit a DW_LNS_set_column. */
2900 LI_set_column,
2901
2902 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2903 LI_negate_stmt,
2904
2905 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2906 LI_set_prologue_end,
2907 LI_set_epilogue_begin,
2908
2909 /* Emit a DW_LNE_set_discriminator. */
2910 LI_set_discriminator,
2911
2912 /* Output a Fixed Advance PC; the target PC is the label index; the
2913 base PC is the previous LI_adv_address or LI_set_address entry.
2914 We only use this when emitting debug views without assembler
2915 support, at explicit user request. Ideally, we should only use
2916 it when the offset might be zero but we can't tell: it's the only
2917 way to maybe change the PC without resetting the view number. */
2918 LI_adv_address
2919 };
2920
2921 typedef struct GTY(()) dw_line_info_struct {
2922 enum dw_line_info_opcode opcode;
2923 unsigned int val;
2924 } dw_line_info_entry;
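/* Illustrative contents (not taken from any real compilation): a function
   whose body starts at text label 1 on line 42 and whose next statement is
   on line 43 would typically be recorded as the entries
     { LI_set_address, 1 }, { LI_set_line, 42 }, { LI_set_line, 43 },
   which are later lowered to DW_LNE_set_address plus line-advance or
   special opcodes when the .debug_line section is emitted.  */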
2925
2926
2927 struct GTY(()) dw_line_info_table {
2928 /* The label that marks the end of this section. */
2929 const char *end_label;
2930
2931 /* The values for the last row of the matrix, as collected in the table.
2932 These are used to minimize the changes to the next row. */
2933 unsigned int file_num;
2934 unsigned int line_num;
2935 unsigned int column_num;
2936 int discrim_num;
2937 bool is_stmt;
2938 bool in_use;
2939
2940 /* This denotes the NEXT view number.
2941
2942 If it is 0, it is known that the NEXT view will be the first view
2943 at the given PC.
2944
2945 If it is -1, we're forcing the view number to be reset, e.g. at a
2946 function entry.
2947
2948 The meaning of other nonzero values depends on whether we're
2949 computing views internally or leaving it for the assembler to do
2950 so. If we're emitting them internally, view denotes the view
2951 number since the last known advance of PC. If we're leaving it
2952 for the assembler, it denotes the LVU label number that we're
2953 going to ask the assembler to assign. */
2954 var_loc_view view;
2955
2956 /* This counts the number of symbolic views emitted in this table
2957 since the latest view reset. Its max value, over all tables,
2958 sets symview_upper_bound. */
2959 var_loc_view symviews_since_reset;
2960
2961 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
2962 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
2963 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
2964 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
2965
2966 vec<dw_line_info_entry, va_gc> *entries;
2967 };
2968
2969 /* This is an upper bound for view numbers that the assembler may
2970 assign to symbolic views output in this translation. It is used to
2971 decide how big a field to use to represent view numbers in
2972 symview-classed attributes. */
2973
2974 static var_loc_view symview_upper_bound;
2975
2976 /* If we're keeping track of location views and their reset points, and
2977 INSN is a reset point (i.e., it necessarily advances the PC), mark
2978 the next view in TABLE as reset. */
2979
2980 static void
2981 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
2982 {
2983 if (!debug_internal_reset_location_views)
2984 return;
2985
2986 /* Maybe turn (part of?) this test into a default target hook. */
2987 int reset = 0;
2988
2989 if (targetm.reset_location_view)
2990 reset = targetm.reset_location_view (insn);
2991
2992 if (reset)
2993 ;
2994 else if (JUMP_TABLE_DATA_P (insn))
2995 reset = 1;
2996 else if (GET_CODE (insn) == USE
2997 || GET_CODE (insn) == CLOBBER
2998 || GET_CODE (insn) == ASM_INPUT
2999 || asm_noperands (insn) >= 0)
3000 ;
3001 else if (get_attr_min_length (insn) > 0)
3002 reset = 1;
3003
3004 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3005 RESET_NEXT_VIEW (table->view);
3006 }
3007
3008 /* Each DIE attribute has a field specifying the attribute kind,
3009 a link to the next attribute in the chain, and an attribute value.
3010 Attributes are typically linked below the DIE they modify. */
3011
3012 typedef struct GTY(()) dw_attr_struct {
3013 enum dwarf_attribute dw_attr;
3014 dw_val_node dw_attr_val;
3015 }
3016 dw_attr_node;
3017
3018
3019 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3020 The children of each node form a circular list linked by
3021 die_sib. die_child points to the node *before* the "first" child node. */
3022
3023 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3024 union die_symbol_or_type_node
3025 {
3026 const char * GTY ((tag ("0"))) die_symbol;
3027 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3028 }
3029 GTY ((desc ("%0.comdat_type_p"))) die_id;
3030 vec<dw_attr_node, va_gc> *die_attr;
3031 dw_die_ref die_parent;
3032 dw_die_ref die_child;
3033 dw_die_ref die_sib;
3034 dw_die_ref die_definition; /* ref from a specification to its definition */
3035 dw_offset die_offset;
3036 unsigned long die_abbrev;
3037 int die_mark;
3038 unsigned int decl_id;
3039 enum dwarf_tag die_tag;
3040 /* Die is used and must not be pruned as unused. */
3041 BOOL_BITFIELD die_perennial_p : 1;
3042 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3043 /* For an external ref to die_symbol, set if die_offset contains an
3044 extra offset relative to that symbol. */
3045 BOOL_BITFIELD with_offset : 1;
3046 /* Whether this DIE was removed from the DIE tree, for example via
3047 prune_unused_types. We don't consider those present from the
3048 DIE lookup routines. */
3049 BOOL_BITFIELD removed : 1;
3050 /* Lots of spare bits. */
3051 }
3052 die_node;
3053
3054 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3055 static bool early_dwarf;
3056 static bool early_dwarf_finished;
3057 struct set_early_dwarf {
3058 bool saved;
3059 set_early_dwarf () : saved(early_dwarf)
3060 {
3061 gcc_assert (! early_dwarf_finished);
3062 early_dwarf = true;
3063 }
3064 ~set_early_dwarf () { early_dwarf = saved; }
3065 };
3066
3067 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3068 #define FOR_EACH_CHILD(die, c, expr) do { \
3069 c = die->die_child; \
3070 if (c) do { \
3071 c = c->die_sib; \
3072 expr; \
3073 } while (c != die->die_child); \
3074 } while (0)
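/* Usage sketch (illustrative): counting the children of a DIE.

     dw_die_ref c;
     unsigned n = 0;
     FOR_EACH_CHILD (die, c, n++);

   Because die_child points at the node before the first child, the macro
   advances to c->die_sib before evaluating the expression, so each child is
   visited exactly once.  */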
3075
3076 /* The pubname structure */
3077
3078 typedef struct GTY(()) pubname_struct {
3079 dw_die_ref die;
3080 const char *name;
3081 }
3082 pubname_entry;
3083
3084
3085 struct GTY(()) dw_ranges {
3086 const char *label;
3087 /* If this is positive, it's a block number, otherwise it's a
3088 bitwise-negated index into dw_ranges_by_label. */
3089 int num;
3090 /* Index for the range list for DW_FORM_rnglistx. */
3091 unsigned int idx : 31;
3092 /* True if this range might be in a different section
3093 from the previous entry. */
3094 unsigned int maybe_new_sec : 1;
3095 };
3096
3097 /* A structure to hold a macinfo entry. */
3098
3099 typedef struct GTY(()) macinfo_struct {
3100 unsigned char code;
3101 unsigned HOST_WIDE_INT lineno;
3102 const char *info;
3103 }
3104 macinfo_entry;
3105
3106
3107 struct GTY(()) dw_ranges_by_label {
3108 const char *begin;
3109 const char *end;
3110 };
3111
3112 /* The comdat type node structure. */
3113 struct GTY(()) comdat_type_node
3114 {
3115 dw_die_ref root_die;
3116 dw_die_ref type_die;
3117 dw_die_ref skeleton_die;
3118 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3119 comdat_type_node *next;
3120 };
3121
3122 /* A list of DIEs for which we can't determine ancestry (parent_die
3123 field) just yet. Later in dwarf2out_finish we will fill in the
3124 missing bits. */
3125 typedef struct GTY(()) limbo_die_struct {
3126 dw_die_ref die;
3127 /* The tree for which this DIE was created. We use this to
3128 determine ancestry later. */
3129 tree created_for;
3130 struct limbo_die_struct *next;
3131 }
3132 limbo_die_node;
3133
3134 typedef struct skeleton_chain_struct
3135 {
3136 dw_die_ref old_die;
3137 dw_die_ref new_die;
3138 struct skeleton_chain_struct *parent;
3139 }
3140 skeleton_chain_node;
3141
3142 /* Define a macro which returns nonzero for a TYPE_DECL which was
3143 implicitly generated for a type.
3144
3145 Note that, unlike the C front-end (which generates a NULL named
3146 TYPE_DECL node for each complete tagged type, each array type,
3147 and each function type node created) the C++ front-end generates
3148 a _named_ TYPE_DECL node for each tagged type node created.
3149 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3150 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3151 front-end, but for each type, tagged or not. */
3152
3153 #define TYPE_DECL_IS_STUB(decl) \
3154 (DECL_NAME (decl) == NULL_TREE \
3155 || (DECL_ARTIFICIAL (decl) \
3156 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3157 /* This is necessary for stub decls that \
3158 appear in nested inline functions. */ \
3159 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3160 && (decl_ultimate_origin (decl) \
3161 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3162
3163 /* Information concerning the compilation unit's programming
3164 language, and compiler version. */
3165
3166 /* Fixed size portion of the DWARF compilation unit header. */
3167 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3168 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3169 + (dwarf_version >= 5 ? 4 : 3))
3170
3171 /* Fixed size portion of the DWARF comdat type unit header. */
3172 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3173 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3174 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3175
3176 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3177 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3178 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3179
3180 /* Fixed size portion of public names info. */
3181 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3182
3183 /* Fixed size portion of the address range info. */
3184 #define DWARF_ARANGES_HEADER_SIZE \
3185 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3186 DWARF2_ADDR_SIZE * 2) \
3187 - DWARF_INITIAL_LENGTH_SIZE)
3188
3189 /* Size of padding portion in the address range info. It must be
3190 aligned to twice the pointer size. */
3191 #define DWARF_ARANGES_PAD_SIZE \
3192 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3193 DWARF2_ADDR_SIZE * 2) \
3194 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
3195
3196 /* Use assembler line directives if available. */
3197 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3198 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3199 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3200 #else
3201 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3202 #endif
3203 #endif
3204
3205 /* Use assembler views in line directives if available. */
3206 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3207 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3208 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3209 #else
3210 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3211 #endif
3212 #endif
3213
3214 /* Return true if GCC configure detected assembler support for .loc. */
3215
3216 bool
3217 dwarf2out_default_as_loc_support (void)
3218 {
3219 return DWARF2_ASM_LINE_DEBUG_INFO;
3220 #if (GCC_VERSION >= 3000)
3221 # undef DWARF2_ASM_LINE_DEBUG_INFO
3222 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3223 #endif
3224 }
3225
3226 /* Return true if GCC configure detected assembler support for views
3227 in .loc directives. */
3228
3229 bool
3230 dwarf2out_default_as_locview_support (void)
3231 {
3232 return DWARF2_ASM_VIEW_DEBUG_INFO;
3233 #if (GCC_VERSION >= 3000)
3234 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3235 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3236 #endif
3237 }
3238
3239 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3240 view computation, and it refers to a view identifier for which we
3241 will not emit a label because it is known to map to a view number
3242 zero. We won't allocate the bitmap if we're not using assembler
3243 support for location views, but we have to make the variable
3244 visible for GGC and for code that will be optimized out for lack of
3245 support but that's still parsed and compiled. We could abstract it
3246 out with macros, but it's not worth it. */
3247 static GTY(()) bitmap zero_view_p;
3248
3249 /* Evaluate to TRUE iff N is known to identify the first location view
3250 at its PC. When not using assembler location view computation,
3251 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3252 and the view label numbers recorded in it are the ones known to be
3253 zero. */
3254 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3255 || (N) == (var_loc_view)-1 \
3256 || (zero_view_p \
3257 && bitmap_bit_p (zero_view_p, (N))))
3258
3259 /* Return true iff we're to emit .loc directives for the assembler to
3260 generate line number sections.
3261
3262 When we're not emitting views, all we need from the assembler is
3263 support for .loc directives.
3264
3265 If we are emitting views, we can only use the assembler's .loc
3266 support if it also supports views.
3267
3268 When the compiler is emitting the line number programs and
3269 computing view numbers itself, it resets view numbers at known PC
3270 changes and counts from that, and then it emits view numbers as
3271 literal constants in locviewlists. There are cases in which the
3272 compiler is not sure about PC changes, e.g. when extra alignment is
3273 requested for a label. In these cases, the compiler may not reset
3274 the view counter, and the potential PC advance in the line number
3275 program will use an opcode that does not reset the view counter
3276 even if the PC actually changes, so that compiler and debug info
3277 consumer can keep view numbers in sync.
3278
3279 When the compiler defers view computation to the assembler, it
3280 emits symbolic view numbers in locviewlists, with the exception of
3281 views known to be zero (forced resets, or reset after
3282 compiler-visible PC changes): instead of emitting symbols for
3283 these, we emit literal zero and assert the assembler agrees with
3284 the compiler's assessment. We could use symbolic views everywhere,
3285 instead of special-casing zero views, but then we'd be unable to
3286 optimize out locviewlists that contain only zeros. */
3287
3288 static bool
3289 output_asm_line_debug_info (void)
3290 {
3291 return (dwarf2out_as_loc_support
3292 && (dwarf2out_as_locview_support
3293 || !debug_variable_location_views));
3294 }
3295
3296 /* Minimum line offset in a special line info. opcode.
3297 This value was chosen to give a reasonable range of values. */
3298 #define DWARF_LINE_BASE -10
3299
3300 /* First special line opcode - leave room for the standard opcodes. */
3301 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3302
3303 /* Range of line offsets in a special line info. opcode. */
3304 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
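/* Worked example (per the DWARF special-opcode formula, illustrative):
   DW_LNS_set_isa is standard opcode 12, so DWARF_LINE_OPCODE_BASE is 13 and
   DWARF_LINE_RANGE is 254 - 13 + 1 = 242.  A special opcode encodes
     opcode = (line_delta - DWARF_LINE_BASE)
              + DWARF_LINE_RANGE * operation_advance
              + DWARF_LINE_OPCODE_BASE
   so advancing the line by +2 with no address advance is the single byte
   (2 - (-10)) + 242 * 0 + 13 = 25.  */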
3305
3306 /* Flag that indicates the initial value of the is_stmt_start flag.
3307 In the present implementation, we do not mark any lines as
3308 the beginning of a source statement, because that information
3309 is not made available by the GCC front-end. */
3310 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3311
3312 /* Maximum number of operations per instruction bundle. */
3313 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3314 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3315 #endif
3316
3317 /* This location is used by calc_die_sizes() to keep track of
3318 the offset of each DIE within the .debug_info section. */
3319 static unsigned long next_die_offset;
3320
3321 /* Record the root of the DIE's built for the current compilation unit. */
3322 static GTY(()) dw_die_ref single_comp_unit_die;
3323
3324 /* A list of type DIEs that have been separated into comdat sections. */
3325 static GTY(()) comdat_type_node *comdat_type_list;
3326
3327 /* A list of CU DIEs that have been separated. */
3328 static GTY(()) limbo_die_node *cu_die_list;
3329
3330 /* A list of DIEs with a NULL parent waiting to be relocated. */
3331 static GTY(()) limbo_die_node *limbo_die_list;
3332
3333 /* A list of DIEs for which we may have to generate
3334 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3335 static GTY(()) limbo_die_node *deferred_asm_name;
3336
3337 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3338 {
3339 typedef const char *compare_type;
3340
3341 static hashval_t hash (dwarf_file_data *);
3342 static bool equal (dwarf_file_data *, const char *);
3343 };
3344
3345 /* Filenames referenced by this compilation unit. */
3346 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3347
3348 struct decl_die_hasher : ggc_ptr_hash<die_node>
3349 {
3350 typedef tree compare_type;
3351
3352 static hashval_t hash (die_node *);
3353 static bool equal (die_node *, tree);
3354 };
3355 /* A hash table of references to DIE's that describe declarations.
3356 The key is a DECL_UID() which is a unique number identifying each decl. */
3357 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3358
3359 struct GTY ((for_user)) variable_value_struct {
3360 unsigned int decl_id;
3361 vec<dw_die_ref, va_gc> *dies;
3362 };
3363
3364 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3365 {
3366 typedef tree compare_type;
3367
3368 static hashval_t hash (variable_value_struct *);
3369 static bool equal (variable_value_struct *, tree);
3370 };
3371 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3372 dw_val_class_decl_ref class, indexed by the FUNCTION_DECLs that are
3373 the DECL_CONTEXT of the referenced VAR_DECLs. */
3374 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3375
3376 struct block_die_hasher : ggc_ptr_hash<die_struct>
3377 {
3378 static hashval_t hash (die_struct *);
3379 static bool equal (die_struct *, die_struct *);
3380 };
3381
3382 /* A hash table of references to DIE's that describe COMMON blocks.
3383 The key is DECL_UID() ^ die_parent. */
3384 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3385
3386 typedef struct GTY(()) die_arg_entry_struct {
3387 dw_die_ref die;
3388 tree arg;
3389 } die_arg_entry;
3390
3391
3392 /* Node of the variable location list. */
3393 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3394 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3395 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3396 in mode of the EXPR_LIST node and first EXPR_LIST operand
3397 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3398 location or NULL for padding. For larger bitsizes,
3399 mode is 0 and first operand is a CONCAT with bitsize
3400 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3401 NULL as second operand. */
3402 rtx GTY (()) loc;
3403 const char * GTY (()) label;
3404 struct var_loc_node * GTY (()) next;
3405 var_loc_view view;
3406 };
3407
3408 /* Variable location list. */
3409 struct GTY ((for_user)) var_loc_list_def {
3410 struct var_loc_node * GTY (()) first;
3411
3412 /* Pointer to the last or last-but-one element of the
3413 chained list. If the list is empty, both first and
3414 last are NULL. If the list contains just one node,
3415 or the last node is certainly not redundant, it points
3416 to the last node; otherwise it points to the last but one.
3417 Do not mark it for GC because it is marked through the chain. */
3418 struct var_loc_node * GTY ((skip ("%h"))) last;
3419
3420 /* Pointer to the last element before a section switch;
3421 if NULL, either sections weren't switched or first
3422 is after the section switch. */
3423 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3424
3425 /* DECL_UID of the variable decl. */
3426 unsigned int decl_id;
3427 };
3428 typedef struct var_loc_list_def var_loc_list;
3429
3430 /* Call argument location list. */
3431 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3432 rtx GTY (()) call_arg_loc_note;
3433 const char * GTY (()) label;
3434 tree GTY (()) block;
3435 bool tail_call_p;
3436 rtx GTY (()) symbol_ref;
3437 struct call_arg_loc_node * GTY (()) next;
3438 };
3439
3440
3441 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3442 {
3443 typedef const_tree compare_type;
3444
3445 static hashval_t hash (var_loc_list *);
3446 static bool equal (var_loc_list *, const_tree);
3447 };
3448
3449 /* Table of decl location linked lists. */
3450 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3451
3452 /* Head and tail of call_arg_loc chain. */
3453 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3454 static struct call_arg_loc_node *call_arg_loc_last;
3455
3456 /* Number of call sites in the current function. */
3457 static int call_site_count = -1;
3458 /* Number of tail call sites in the current function. */
3459 static int tail_call_site_count = -1;
3460
3461 /* A cached location list. */
3462 struct GTY ((for_user)) cached_dw_loc_list_def {
3463 /* The DECL_UID of the decl that this entry describes. */
3464 unsigned int decl_id;
3465
3466 /* The cached location list. */
3467 dw_loc_list_ref loc_list;
3468 };
3469 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3470
3471 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3472 {
3473
3474 typedef const_tree compare_type;
3475
3476 static hashval_t hash (cached_dw_loc_list *);
3477 static bool equal (cached_dw_loc_list *, const_tree);
3478 };
3479
3480 /* Table of cached location lists. */
3481 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3482
3483 /* A vector of references to DIE's that are uniquely identified by their tag,
3484 presence/absence of children DIE's, and list of attribute/value pairs. */
3485 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3486
3487 /* A hash map to remember the stack usage for DWARF procedures. The value
3488 stored is the difference in stack depth before the DWARF procedure is
3489 invoked and after it returns. In other words, for a DWARF procedure
3490 that consumes N stack slots and pushes M, this stores M - N. */
3491 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
3492
3493 /* A global counter for generating labels for line number data. */
3494 static unsigned int line_info_label_num;
3495
3496 /* The current table to which we should emit line number information
3497 for the current function. This will be set up at the beginning of
3498 assembly for the function. */
3499 static GTY(()) dw_line_info_table *cur_line_info_table;
3500
3501 /* The two default tables of line number info. */
3502 static GTY(()) dw_line_info_table *text_section_line_info;
3503 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3504
3505 /* The set of all non-default tables of line number info. */
3506 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3507
3508 /* A flag to tell pubnames/types export if there is an info section to
3509 refer to. */
3510 static bool info_section_emitted;
3511
3512 /* A pointer to the base of a table that contains a list of publicly
3513 accessible names. */
3514 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3515
3516 /* A pointer to the base of a table that contains a list of publicly
3517 accessible types. */
3518 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3519
3520 /* A pointer to the base of a table that contains a list of macro
3521 defines/undefines (and file start/end markers). */
3522 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3523
3524 /* True if .debug_macinfo or .debug_macros section is going to be
3525 emitted. */
3526 #define have_macinfo \
3527 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3528 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3529 && !macinfo_table->is_empty ())
3530
3531 /* Vector of dies for which we should generate .debug_ranges info. */
3532 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3533
3534 /* Vector of pairs of labels referenced in ranges_table. */
3535 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3536
3537 /* Whether we have location lists that need outputting. */
3538 static GTY(()) bool have_location_lists;
3539
3540 /* Unique label counter. */
3541 static GTY(()) unsigned int loclabel_num;
3542
3543 /* Unique label counter for point-of-call tables. */
3544 static GTY(()) unsigned int poc_label_num;
3545
3546 /* The last file entry emitted by maybe_emit_file(). */
3547 static GTY(()) struct dwarf_file_data * last_emitted_file;
3548
3549 /* Number of internal labels generated by gen_internal_sym(). */
3550 static GTY(()) int label_num;
3551
3552 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3553
3554 /* Instances of generic types for which we need to generate debug
3555 info that describes their generic parameters and arguments. That
3556 generation needs to happen once all types are properly laid out so
3557 we do it at the end of compilation. */
3558 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3559
3560 /* Offset from the "steady-state frame pointer" to the frame base,
3561 within the current function. */
3562 static poly_int64 frame_pointer_fb_offset;
3563 static bool frame_pointer_fb_offset_valid;
3564
3565 static vec<dw_die_ref> base_types;
3566
3567 /* Flags to represent a set of attribute classes for attributes that represent
3568 a scalar value (bounds, pointers, ...). */
3569 enum dw_scalar_form
3570 {
3571 dw_scalar_form_constant = 0x01,
3572 dw_scalar_form_exprloc = 0x02,
3573 dw_scalar_form_reference = 0x04
3574 };
3575
3576 /* Forward declarations for functions defined in this file. */
3577
3578 static int is_pseudo_reg (const_rtx);
3579 static tree type_main_variant (tree);
3580 static int is_tagged_type (const_tree);
3581 static const char *dwarf_tag_name (unsigned);
3582 static const char *dwarf_attr_name (unsigned);
3583 static const char *dwarf_form_name (unsigned);
3584 static tree decl_ultimate_origin (const_tree);
3585 static tree decl_class_context (tree);
3586 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3587 static inline enum dw_val_class AT_class (dw_attr_node *);
3588 static inline unsigned int AT_index (dw_attr_node *);
3589 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3590 static inline unsigned AT_flag (dw_attr_node *);
3591 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3592 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3593 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3594 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3595 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3596 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3597 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3598 unsigned int, unsigned char *);
3599 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3600 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3601 static inline const char *AT_string (dw_attr_node *);
3602 static enum dwarf_form AT_string_form (dw_attr_node *);
3603 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3604 static void add_AT_specification (dw_die_ref, dw_die_ref);
3605 static inline dw_die_ref AT_ref (dw_attr_node *);
3606 static inline int AT_ref_external (dw_attr_node *);
3607 static inline void set_AT_ref_external (dw_attr_node *, int);
3608 static void add_AT_fde_ref (dw_die_ref, enum dwarf_attribute, unsigned);
3609 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3610 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3611 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3612 dw_loc_list_ref);
3613 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3614 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3616 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3617 static void remove_addr_table_entry (addr_table_entry *);
3618 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3619 static inline rtx AT_addr (dw_attr_node *);
3620 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3621 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3622 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3623 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3624 static void add_AT_loclistsptr (dw_die_ref, enum dwarf_attribute,
3625 const char *);
3626 static void add_AT_offset (dw_die_ref, enum dwarf_attribute,
3627 unsigned HOST_WIDE_INT);
3628 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3629 unsigned long, bool);
3630 static inline const char *AT_lbl (dw_attr_node *);
3631 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3632 static const char *get_AT_low_pc (dw_die_ref);
3633 static const char *get_AT_hi_pc (dw_die_ref);
3634 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3635 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3636 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3637 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3638 static bool is_cxx (void);
3639 static bool is_cxx (const_tree);
3640 static bool is_fortran (void);
3641 static bool is_ada (void);
3642 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3643 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3644 static void add_child_die (dw_die_ref, dw_die_ref);
3645 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3646 static dw_die_ref lookup_type_die (tree);
3647 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3648 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3649 static void equate_type_number_to_die (tree, dw_die_ref);
3650 static dw_die_ref lookup_decl_die (tree);
3651 static var_loc_list *lookup_decl_loc (const_tree);
3652 static void equate_decl_number_to_die (tree, dw_die_ref);
3653 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3654 static void print_spaces (FILE *);
3655 static void print_die (dw_die_ref, FILE *);
3656 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3657 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3658 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3659 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3660 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3661 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3662 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3663 struct md5_ctx *, int *);
3664 struct checksum_attributes;
3665 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3666 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3667 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3668 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3669 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3670 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3671 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3672 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3673 static int is_type_die (dw_die_ref);
3674 static int is_comdat_die (dw_die_ref);
3675 static inline bool is_template_instantiation (dw_die_ref);
3676 static int is_declaration_die (dw_die_ref);
3677 static int should_move_die_to_comdat (dw_die_ref);
3678 static dw_die_ref clone_as_declaration (dw_die_ref);
3679 static dw_die_ref clone_die (dw_die_ref);
3680 static dw_die_ref clone_tree (dw_die_ref);
3681 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3682 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3683 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3684 static dw_die_ref generate_skeleton (dw_die_ref);
3685 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3686 dw_die_ref,
3687 dw_die_ref);
3688 static void break_out_comdat_types (dw_die_ref);
3689 static void copy_decls_for_unworthy_types (dw_die_ref);
3690
3691 static void add_sibling_attributes (dw_die_ref);
3692 static void output_location_lists (dw_die_ref);
3693 static int constant_size (unsigned HOST_WIDE_INT);
3694 static unsigned long size_of_die (dw_die_ref);
3695 static void calc_die_sizes (dw_die_ref);
3696 static void calc_base_type_die_sizes (void);
3697 static void mark_dies (dw_die_ref);
3698 static void unmark_dies (dw_die_ref);
3699 static void unmark_all_dies (dw_die_ref);
3700 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3701 static unsigned long size_of_aranges (void);
3702 static enum dwarf_form value_format (dw_attr_node *);
3703 static void output_value_format (dw_attr_node *);
3704 static void output_abbrev_section (void);
3705 static void output_die_abbrevs (unsigned long, dw_die_ref);
3706 static void output_die (dw_die_ref);
3707 static void output_compilation_unit_header (enum dwarf_unit_type);
3708 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3709 static void output_comdat_type_unit (comdat_type_node *);
3710 static const char *dwarf2_name (tree, int);
3711 static void add_pubname (tree, dw_die_ref);
3712 static void add_enumerator_pubname (const char *, dw_die_ref);
3713 static void add_pubname_string (const char *, dw_die_ref);
3714 static void add_pubtype (tree, dw_die_ref);
3715 static void output_pubnames (vec<pubname_entry, va_gc> *);
3716 static void output_aranges (void);
3717 static unsigned int add_ranges (const_tree, bool = false);
3718 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3719 bool *, bool);
3720 static void output_ranges (void);
3721 static dw_line_info_table *new_line_info_table (void);
3722 static void output_line_info (bool);
3723 static void output_file_names (void);
3724 static dw_die_ref base_type_die (tree, bool);
3725 static int is_base_type (tree);
3726 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3727 static int decl_quals (const_tree);
3728 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3729 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3730 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3731 static int type_is_enum (const_tree);
3732 static unsigned int dbx_reg_number (const_rtx);
3733 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3734 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3735 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3736 enum var_init_status);
3737 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3738 enum var_init_status);
3739 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3740 enum var_init_status);
3741 static int is_based_loc (const_rtx);
3742 static bool resolve_one_addr (rtx *);
3743 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3744 enum var_init_status);
3745 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3746 enum var_init_status);
3747 struct loc_descr_context;
3748 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3749 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3750 static dw_loc_list_ref loc_list_from_tree (tree, int,
3751 struct loc_descr_context *);
3752 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3753 struct loc_descr_context *);
3754 static HOST_WIDE_INT ceiling (HOST_WIDE_INT, unsigned int);
3755 static tree field_type (const_tree);
3756 static unsigned int simple_type_align_in_bits (const_tree);
3757 static unsigned int simple_decl_align_in_bits (const_tree);
3758 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3759 struct vlr_context;
3760 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3761 HOST_WIDE_INT *);
3762 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3763 dw_loc_list_ref);
3764 static void add_data_member_location_attribute (dw_die_ref, tree,
3765 struct vlr_context *);
3766 static bool add_const_value_attribute (dw_die_ref, rtx);
3767 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3768 static void insert_wide_int (const wide_int &, unsigned char *, int);
3769 static void insert_float (const_rtx, unsigned char *);
3770 static rtx rtl_for_decl_location (tree);
3771 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3772 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3773 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3774 static void add_name_attribute (dw_die_ref, const char *);
3775 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3776 static void add_comp_dir_attribute (dw_die_ref);
3777 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3778 struct loc_descr_context *);
3779 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3780 struct loc_descr_context *);
3781 static void add_subscript_info (dw_die_ref, tree, bool);
3782 static void add_byte_size_attribute (dw_die_ref, tree);
3783 static void add_alignment_attribute (dw_die_ref, tree);
3784 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3785 struct vlr_context *);
3786 static void add_bit_size_attribute (dw_die_ref, tree);
3787 static void add_prototyped_attribute (dw_die_ref, tree);
3788 static dw_die_ref add_abstract_origin_attribute (dw_die_ref, tree);
3789 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3790 static void add_src_coords_attributes (dw_die_ref, tree);
3791 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3792 static void add_discr_value (dw_die_ref, dw_discr_value *);
3793 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3794 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3795 static void push_decl_scope (tree);
3796 static void pop_decl_scope (void);
3797 static dw_die_ref scope_die_for (tree, dw_die_ref);
3798 static inline int local_scope_p (dw_die_ref);
3799 static inline int class_scope_p (dw_die_ref);
3800 static inline int class_or_namespace_scope_p (dw_die_ref);
3801 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3802 static void add_calling_convention_attribute (dw_die_ref, tree);
3803 static const char *type_tag (const_tree);
3804 static tree member_declared_type (const_tree);
3805 #if 0
3806 static const char *decl_start_label (tree);
3807 #endif
3808 static void gen_array_type_die (tree, dw_die_ref);
3809 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3810 #if 0
3811 static void gen_entry_point_die (tree, dw_die_ref);
3812 #endif
3813 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3814 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3815 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3816 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3817 static void gen_formal_types_die (tree, dw_die_ref);
3818 static void gen_subprogram_die (tree, dw_die_ref);
3819 static void gen_variable_die (tree, tree, dw_die_ref);
3820 static void gen_const_die (tree, dw_die_ref);
3821 static void gen_label_die (tree, dw_die_ref);
3822 static void gen_lexical_block_die (tree, dw_die_ref);
3823 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3824 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3825 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3826 static dw_die_ref gen_compile_unit_die (const char *);
3827 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3828 static void gen_member_die (tree, dw_die_ref);
3829 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3830 enum debug_info_usage);
3831 static void gen_subroutine_type_die (tree, dw_die_ref);
3832 static void gen_typedef_die (tree, dw_die_ref);
3833 static void gen_type_die (tree, dw_die_ref);
3834 static void gen_block_die (tree, dw_die_ref);
3835 static void decls_for_scope (tree, dw_die_ref);
3836 static bool is_naming_typedef_decl (const_tree);
3837 static inline dw_die_ref get_context_die (tree);
3838 static void gen_namespace_die (tree, dw_die_ref);
3839 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3840 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3841 static dw_die_ref force_decl_die (tree);
3842 static dw_die_ref force_type_die (tree);
3843 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3844 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3845 static struct dwarf_file_data * lookup_filename (const char *);
3846 static void retry_incomplete_types (void);
3847 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3848 static void gen_generic_params_dies (tree);
3849 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3850 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3851 static void splice_child_die (dw_die_ref, dw_die_ref);
3852 static int file_info_cmp (const void *, const void *);
3853 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3854 const char *, var_loc_view, const char *);
3855 static void output_loc_list (dw_loc_list_ref);
3856 static char *gen_internal_sym (const char *);
3857 static bool want_pubnames (void);
3858
3859 static void prune_unmark_dies (dw_die_ref);
3860 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3861 static void prune_unused_types_mark (dw_die_ref, int);
3862 static void prune_unused_types_walk (dw_die_ref);
3863 static void prune_unused_types_walk_attribs (dw_die_ref);
3864 static void prune_unused_types_prune (dw_die_ref);
3865 static void prune_unused_types (void);
3866 static int maybe_emit_file (struct dwarf_file_data *fd);
3867 static inline const char *AT_vms_delta1 (dw_attr_node *);
3868 static inline const char *AT_vms_delta2 (dw_attr_node *);
3869 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3870 const char *, const char *);
3871 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3872 static void gen_remaining_tmpl_value_param_die_attribute (void);
3873 static bool generic_type_p (tree);
3874 static void schedule_generic_params_dies_gen (tree t);
3875 static void gen_scheduled_generic_parms_dies (void);
3876 static void resolve_variable_values (void);
3877
3878 static const char *comp_dir_string (void);
3879
3880 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3881
3882 /* enum for tracking thread-local variables whose address is really an offset
3883 relative to the TLS pointer, which will need link-time relocation, but will
3884 not need relocation by the DWARF consumer. */
3885
3886 enum dtprel_bool
3887 {
3888 dtprel_false = 0,
3889 dtprel_true = 1
3890 };
3891
3892 /* Return the operator to use for an address of a variable. For dtprel_true, we
3893 use DW_OP_const*. For regular variables, which need both link-time
3894 relocation and consumer-level relocation (e.g., to account for shared objects
3895 loaded at a random address), we use DW_OP_addr*. */
3896
3897 static inline enum dwarf_location_atom
3898 dw_addr_op (enum dtprel_bool dtprel)
3899 {
3900 if (dtprel == dtprel_true)
3901 return (dwarf_split_debug_info ? DW_OP_GNU_const_index
3902 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3903 else
3904 return dwarf_split_debug_info ? DW_OP_GNU_addr_index : DW_OP_addr;
3905 }
3906
3907 /* Return a pointer to a newly allocated address location description. If
3908 dwarf_split_debug_info is true, then record the address with the appropriate
3909 relocation. */
3910 static inline dw_loc_descr_ref
3911 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3912 {
3913 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3914
3915 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3916 ref->dw_loc_oprnd1.v.val_addr = addr;
3917 ref->dtprel = dtprel;
3918 if (dwarf_split_debug_info)
3919 ref->dw_loc_oprnd1.val_entry
3920 = add_addr_table_entry (addr,
3921 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3922 else
3923 ref->dw_loc_oprnd1.val_entry = NULL;
3924
3925 return ref;
3926 }
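
/* A minimal usage sketch (illustrative only, not a fixed call site): a caller
   that needs the run-time address of a non-TLS SYMBOL_REF rtx SYM as a
   location description could write

     dw_loc_descr_ref ref = new_addr_loc_descr (sym, dtprel_false);

   which yields DW_OP_addr <sym> normally, or DW_OP_GNU_addr_index plus a
   .debug_addr table entry when -gsplit-dwarf (dwarf_split_debug_info) is in
   effect.  */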
3927
3928 /* Section names used to hold DWARF debugging information. */
3929
3930 #ifndef DEBUG_INFO_SECTION
3931 #define DEBUG_INFO_SECTION ".debug_info"
3932 #endif
3933 #ifndef DEBUG_DWO_INFO_SECTION
3934 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3935 #endif
3936 #ifndef DEBUG_LTO_INFO_SECTION
3937 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3938 #endif
3939 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3940 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3941 #endif
3942 #ifndef DEBUG_ABBREV_SECTION
3943 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3944 #endif
3945 #ifndef DEBUG_LTO_ABBREV_SECTION
3946 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3947 #endif
3948 #ifndef DEBUG_DWO_ABBREV_SECTION
3949 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3950 #endif
3951 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3952 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3953 #endif
3954 #ifndef DEBUG_ARANGES_SECTION
3955 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3956 #endif
3957 #ifndef DEBUG_ADDR_SECTION
3958 #define DEBUG_ADDR_SECTION ".debug_addr"
3959 #endif
3960 #ifndef DEBUG_MACINFO_SECTION
3961 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
3962 #endif
3963 #ifndef DEBUG_LTO_MACINFO_SECTION
3964 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
3965 #endif
3966 #ifndef DEBUG_DWO_MACINFO_SECTION
3967 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
3968 #endif
3969 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
3970 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
3971 #endif
3972 #ifndef DEBUG_MACRO_SECTION
3973 #define DEBUG_MACRO_SECTION ".debug_macro"
3974 #endif
3975 #ifndef DEBUG_LTO_MACRO_SECTION
3976 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
3977 #endif
3978 #ifndef DEBUG_DWO_MACRO_SECTION
3979 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
3980 #endif
3981 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
3982 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
3983 #endif
3984 #ifndef DEBUG_LINE_SECTION
3985 #define DEBUG_LINE_SECTION ".debug_line"
3986 #endif
3987 #ifndef DEBUG_LTO_LINE_SECTION
3988 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
3989 #endif
3990 #ifndef DEBUG_DWO_LINE_SECTION
3991 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
3992 #endif
3993 #ifndef DEBUG_LTO_DWO_LINE_SECTION
3994 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
3995 #endif
3996 #ifndef DEBUG_LOC_SECTION
3997 #define DEBUG_LOC_SECTION ".debug_loc"
3998 #endif
3999 #ifndef DEBUG_DWO_LOC_SECTION
4000 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4001 #endif
4002 #ifndef DEBUG_LOCLISTS_SECTION
4003 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4004 #endif
4005 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4006 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4007 #endif
4008 #ifndef DEBUG_PUBNAMES_SECTION
4009 #define DEBUG_PUBNAMES_SECTION \
4010 ((debug_generate_pub_sections == 2) \
4011 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4012 #endif
4013 #ifndef DEBUG_PUBTYPES_SECTION
4014 #define DEBUG_PUBTYPES_SECTION \
4015 ((debug_generate_pub_sections == 2) \
4016 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4017 #endif
4018 #ifndef DEBUG_STR_OFFSETS_SECTION
4019 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4020 #endif
4021 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4022 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4023 #endif
4024 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4025 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4026 #endif
4027 #ifndef DEBUG_STR_SECTION
4028 #define DEBUG_STR_SECTION ".debug_str"
4029 #endif
4030 #ifndef DEBUG_LTO_STR_SECTION
4031 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4032 #endif
4033 #ifndef DEBUG_STR_DWO_SECTION
4034 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4035 #endif
4036 #ifndef DEBUG_LTO_STR_DWO_SECTION
4037 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4038 #endif
4039 #ifndef DEBUG_RANGES_SECTION
4040 #define DEBUG_RANGES_SECTION ".debug_ranges"
4041 #endif
4042 #ifndef DEBUG_RNGLISTS_SECTION
4043 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4044 #endif
4045 #ifndef DEBUG_LINE_STR_SECTION
4046 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4047 #endif
4048 #ifndef DEBUG_LTO_LINE_STR_SECTION
4049 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4050 #endif
4051
4052 /* Standard ELF section names for compiled code and data. */
4053 #ifndef TEXT_SECTION_NAME
4054 #define TEXT_SECTION_NAME ".text"
4055 #endif
4056
4057 /* Section flags for .debug_str section. */
4058 #define DEBUG_STR_SECTION_FLAGS \
4059 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4060 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4061 : SECTION_DEBUG)
4062
4063 /* Section flags for .debug_str.dwo section. */
4064 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4065
4066 /* Attribute used to refer to the macro section. */
4067 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4068 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
4069
4070 /* Labels we insert at the beginning of sections so that we can reference
4071 them instead of the section names themselves. */
4072
4073 #ifndef TEXT_SECTION_LABEL
4074 #define TEXT_SECTION_LABEL "Ltext"
4075 #endif
4076 #ifndef COLD_TEXT_SECTION_LABEL
4077 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4078 #endif
4079 #ifndef DEBUG_LINE_SECTION_LABEL
4080 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4081 #endif
4082 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4083 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4084 #endif
4085 #ifndef DEBUG_INFO_SECTION_LABEL
4086 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4087 #endif
4088 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4089 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4090 #endif
4091 #ifndef DEBUG_ABBREV_SECTION_LABEL
4092 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4093 #endif
4094 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4095 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4096 #endif
4097 #ifndef DEBUG_ADDR_SECTION_LABEL
4098 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4099 #endif
4100 #ifndef DEBUG_LOC_SECTION_LABEL
4101 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4102 #endif
4103 #ifndef DEBUG_RANGES_SECTION_LABEL
4104 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4105 #endif
4106 #ifndef DEBUG_MACINFO_SECTION_LABEL
4107 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4108 #endif
4109 #ifndef DEBUG_MACRO_SECTION_LABEL
4110 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4111 #endif
4112 #define SKELETON_COMP_DIE_ABBREV 1
4113 #define SKELETON_TYPE_DIE_ABBREV 2
4114
4115 /* Definitions of defaults for formats and names of various special
4116 (artificial) labels which may be generated within this file (when the -g
4117 option is used and DWARF2_DEBUGGING_INFO is in effect).
4118 If necessary, these may be overridden from within the tm.h file, but
4119 typically, overriding these defaults is unnecessary. */
4120
4121 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4122 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4123 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4124 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4125 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4126 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4127 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4128 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4129 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4130 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4131 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4132 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4133 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4134 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4135 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4136
4137 #ifndef TEXT_END_LABEL
4138 #define TEXT_END_LABEL "Letext"
4139 #endif
4140 #ifndef COLD_END_LABEL
4141 #define COLD_END_LABEL "Letext_cold"
4142 #endif
4143 #ifndef BLOCK_BEGIN_LABEL
4144 #define BLOCK_BEGIN_LABEL "LBB"
4145 #endif
4146 #ifndef BLOCK_INLINE_ENTRY_LABEL
4147 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4148 #endif
4149 #ifndef BLOCK_END_LABEL
4150 #define BLOCK_END_LABEL "LBE"
4151 #endif
4152 #ifndef LINE_CODE_LABEL
4153 #define LINE_CODE_LABEL "LM"
4154 #endif
4155
4156 \f
4157 /* Return the root of the DIEs built for the current compilation unit. */
4158 static dw_die_ref
4159 comp_unit_die (void)
4160 {
4161 if (!single_comp_unit_die)
4162 single_comp_unit_die = gen_compile_unit_die (NULL);
4163 return single_comp_unit_die;
4164 }
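
/* Most DIE creation in this file ultimately hangs off this lazily created
   root.  As an illustrative sketch only, a top-level declaration could be
   emitted with

     dw_die_ref die = new_die (DW_TAG_variable, comp_unit_die (), decl);

   though real callers normally pick the parent via scope_die_for or
   get_context_die.  */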
4165
4166 /* We allow a language front-end to designate a function that is to be
4167 called to "demangle" any name before it is put into a DIE. */
4168
4169 static const char *(*demangle_name_func) (const char *);
4170
4171 void
4172 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4173 {
4174 demangle_name_func = func;
4175 }
4176
4177 /* Test if rtl node points to a pseudo register. */
4178
4179 static inline int
4180 is_pseudo_reg (const_rtx rtl)
4181 {
4182 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4183 || (GET_CODE (rtl) == SUBREG
4184 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4185 }
4186
4187 /* Return a reference to a type, with its const and volatile qualifiers
4188 removed. */
4189
4190 static inline tree
4191 type_main_variant (tree type)
4192 {
4193 type = TYPE_MAIN_VARIANT (type);
4194
4195 /* ??? There really should be only one main variant among any group of
4196 variants of a given type (and all of the MAIN_VARIANT values for all
4197 members of the group should point to that one type) but sometimes the C
4198 front-end messes this up for array types, so we work around that bug
4199 here. */
4200 if (TREE_CODE (type) == ARRAY_TYPE)
4201 while (type != TYPE_MAIN_VARIANT (type))
4202 type = TYPE_MAIN_VARIANT (type);
4203
4204 return type;
4205 }
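
/* For instance, given the "const volatile int" variant, TYPE_MAIN_VARIANT
   yields the unqualified "int" node; the extra loop only matters for the
   array-type corner case noted above.  */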
4206
4207 /* Return nonzero if the given type node represents a tagged type. */
4208
4209 static inline int
4210 is_tagged_type (const_tree type)
4211 {
4212 enum tree_code code = TREE_CODE (type);
4213
4214 return (code == RECORD_TYPE || code == UNION_TYPE
4215 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4216 }
4217
4218 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4219
4220 static void
4221 get_ref_die_offset_label (char *label, dw_die_ref ref)
4222 {
4223 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4224 }
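
/* E.g. for a DIE at offset 134 this yields "<debug_info_section_label>+134";
   the spelling of the label itself (something like "Ldebug_info0") depends on
   how ASM_GENERATE_INTERNAL_LABEL formats it for the target.  */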
4225
4226 /* Return die_offset of a DIE reference to a base type. */
4227
4228 static unsigned long int
4229 get_base_type_offset (dw_die_ref ref)
4230 {
4231 if (ref->die_offset)
4232 return ref->die_offset;
4233 if (comp_unit_die ()->die_abbrev)
4234 {
4235 calc_base_type_die_sizes ();
4236 gcc_assert (ref->die_offset);
4237 }
4238 return ref->die_offset;
4239 }
4240
4241 /* Return die_offset of a DIE reference other than a base type. */
4242
4243 static unsigned long int
4244 get_ref_die_offset (dw_die_ref ref)
4245 {
4246 gcc_assert (ref->die_offset);
4247 return ref->die_offset;
4248 }
4249
4250 /* Convert a DIE tag into its string name. */
4251
4252 static const char *
4253 dwarf_tag_name (unsigned int tag)
4254 {
4255 const char *name = get_DW_TAG_name (tag);
4256
4257 if (name != NULL)
4258 return name;
4259
4260 return "DW_TAG_<unknown>";
4261 }
4262
4263 /* Convert a DWARF attribute code into its string name. */
4264
4265 static const char *
4266 dwarf_attr_name (unsigned int attr)
4267 {
4268 const char *name;
4269
4270 switch (attr)
4271 {
4272 #if VMS_DEBUGGING_INFO
4273 case DW_AT_HP_prologue:
4274 return "DW_AT_HP_prologue";
4275 #else
4276 case DW_AT_MIPS_loop_unroll_factor:
4277 return "DW_AT_MIPS_loop_unroll_factor";
4278 #endif
4279
4280 #if VMS_DEBUGGING_INFO
4281 case DW_AT_HP_epilogue:
4282 return "DW_AT_HP_epilogue";
4283 #else
4284 case DW_AT_MIPS_stride:
4285 return "DW_AT_MIPS_stride";
4286 #endif
4287 }
4288
4289 name = get_DW_AT_name (attr);
4290
4291 if (name != NULL)
4292 return name;
4293
4294 return "DW_AT_<unknown>";
4295 }
4296
4297 /* Convert a DWARF value form code into its string name. */
4298
4299 static const char *
4300 dwarf_form_name (unsigned int form)
4301 {
4302 const char *name = get_DW_FORM_name (form);
4303
4304 if (name != NULL)
4305 return name;
4306
4307 return "DW_FORM_<unknown>";
4308 }
4309 \f
4310 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4311 instance of an inlined instance of a decl which is local to an inline
4312 function, so we have to trace all of the way back through the origin chain
4313 to find out what sort of node actually served as the original seed for the
4314 given block. */
4315
4316 static tree
4317 decl_ultimate_origin (const_tree decl)
4318 {
4319 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4320 return NULL_TREE;
4321
4322 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4323 we're trying to output the abstract instance of this function. */
4324 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4325 return NULL_TREE;
4326
4327 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4328 most distant ancestor, this should never happen. */
4329 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4330
4331 return DECL_ABSTRACT_ORIGIN (decl);
4332 }
4333
4334 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4335 of a virtual function may refer to a base class, so we check the 'this'
4336 parameter. */
4337
4338 static tree
4339 decl_class_context (tree decl)
4340 {
4341 tree context = NULL_TREE;
4342
4343 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4344 context = DECL_CONTEXT (decl);
4345 else
4346 context = TYPE_MAIN_VARIANT
4347 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4348
4349 if (context && !TYPE_P (context))
4350 context = NULL_TREE;
4351
4352 return context;
4353 }
4354 \f
4355 /* Add an attribute/value pair to a DIE. */
4356
4357 static inline void
4358 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4359 {
4360 /* Maybe this should be an assert? */
4361 if (die == NULL)
4362 return;
4363
4364 if (flag_checking)
4365 {
4366 /* Check we do not add duplicate attrs. Can't use get_AT here
4367 because that recurses to the specification/abstract origin DIE. */
4368 dw_attr_node *a;
4369 unsigned ix;
4370 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4371 gcc_assert (a->dw_attr != attr->dw_attr);
4372 }
4373
4374 vec_safe_reserve (die->die_attr, 1);
4375 vec_safe_push (die->die_attr, *attr);
4376 }
4377
4378 static inline enum dw_val_class
4379 AT_class (dw_attr_node *a)
4380 {
4381 return a->dw_attr_val.val_class;
4382 }
4383
4384 /* Return the index for any attribute that will be referenced with a
4385 DW_FORM_GNU_addr_index or DW_FORM_GNU_str_index. String indices
4386 are stored in dw_attr_val.v.val_str for reference counting
4387 pruning. */
4388
4389 static inline unsigned int
4390 AT_index (dw_attr_node *a)
4391 {
4392 if (AT_class (a) == dw_val_class_str)
4393 return a->dw_attr_val.v.val_str->index;
4394 else if (a->dw_attr_val.val_entry != NULL)
4395 return a->dw_attr_val.val_entry->index;
4396 return NOT_INDEXED;
4397 }
4398
4399 /* Add a flag value attribute to a DIE. */
4400
4401 static inline void
4402 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4403 {
4404 dw_attr_node attr;
4405
4406 attr.dw_attr = attr_kind;
4407 attr.dw_attr_val.val_class = dw_val_class_flag;
4408 attr.dw_attr_val.val_entry = NULL;
4409 attr.dw_attr_val.v.val_flag = flag;
4410 add_dwarf_attr (die, &attr);
4411 }
4412
4413 static inline unsigned
4414 AT_flag (dw_attr_node *a)
4415 {
4416 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4417 return a->dw_attr_val.v.val_flag;
4418 }
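
/* Typical use: add_AT_flag (die, DW_AT_external, 1) marks a DIE as externally
   visible, and AT_flag reads the value back when the attribute is sized and
   output.  */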
4419
4420 /* Add a signed integer attribute value to a DIE. */
4421
4422 static inline void
4423 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4424 {
4425 dw_attr_node attr;
4426
4427 attr.dw_attr = attr_kind;
4428 attr.dw_attr_val.val_class = dw_val_class_const;
4429 attr.dw_attr_val.val_entry = NULL;
4430 attr.dw_attr_val.v.val_int = int_val;
4431 add_dwarf_attr (die, &attr);
4432 }
4433
4434 static inline HOST_WIDE_INT
4435 AT_int (dw_attr_node *a)
4436 {
4437 gcc_assert (a && (AT_class (a) == dw_val_class_const
4438 || AT_class (a) == dw_val_class_const_implicit));
4439 return a->dw_attr_val.v.val_int;
4440 }
4441
4442 /* Add an unsigned integer attribute value to a DIE. */
4443
4444 static inline void
4445 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4446 unsigned HOST_WIDE_INT unsigned_val)
4447 {
4448 dw_attr_node attr;
4449
4450 attr.dw_attr = attr_kind;
4451 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4452 attr.dw_attr_val.val_entry = NULL;
4453 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4454 add_dwarf_attr (die, &attr);
4455 }
4456
4457 static inline unsigned HOST_WIDE_INT
4458 AT_unsigned (dw_attr_node *a)
4459 {
4460 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4461 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4462 return a->dw_attr_val.v.val_unsigned;
4463 }
4464
4465 /* Add an unsigned wide integer attribute value to a DIE. */
4466
4467 static inline void
4468 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4469 const wide_int& w)
4470 {
4471 dw_attr_node attr;
4472
4473 attr.dw_attr = attr_kind;
4474 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4475 attr.dw_attr_val.val_entry = NULL;
4476 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4477 *attr.dw_attr_val.v.val_wide = w;
4478 add_dwarf_attr (die, &attr);
4479 }
4480
4481 /* Add an unsigned double integer attribute value to a DIE. */
4482
4483 static inline void
4484 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4485 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4486 {
4487 dw_attr_node attr;
4488
4489 attr.dw_attr = attr_kind;
4490 attr.dw_attr_val.val_class = dw_val_class_const_double;
4491 attr.dw_attr_val.val_entry = NULL;
4492 attr.dw_attr_val.v.val_double.high = high;
4493 attr.dw_attr_val.v.val_double.low = low;
4494 add_dwarf_attr (die, &attr);
4495 }
4496
4497 /* Add a vector of values (e.g. a floating point constant) attribute to a DIE. */
4498
4499 static inline void
4500 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4501 unsigned int length, unsigned int elt_size, unsigned char *array)
4502 {
4503 dw_attr_node attr;
4504
4505 attr.dw_attr = attr_kind;
4506 attr.dw_attr_val.val_class = dw_val_class_vec;
4507 attr.dw_attr_val.val_entry = NULL;
4508 attr.dw_attr_val.v.val_vec.length = length;
4509 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4510 attr.dw_attr_val.v.val_vec.array = array;
4511 add_dwarf_attr (die, &attr);
4512 }
4513
4514 /* Add an 8-byte data attribute value to a DIE. */
4515
4516 static inline void
4517 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4518 unsigned char data8[8])
4519 {
4520 dw_attr_node attr;
4521
4522 attr.dw_attr = attr_kind;
4523 attr.dw_attr_val.val_class = dw_val_class_data8;
4524 attr.dw_attr_val.val_entry = NULL;
4525 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4526 add_dwarf_attr (die, &attr);
4527 }
4528
4529 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4530 dwarf_split_debug_info, address attributes in dies destined for the
4531 final executable have force_direct set to avoid using indexed
4532 references. */
4533
4534 static inline void
4535 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4536 bool force_direct)
4537 {
4538 dw_attr_node attr;
4539 char * lbl_id;
4540
4541 lbl_id = xstrdup (lbl_low);
4542 attr.dw_attr = DW_AT_low_pc;
4543 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4544 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4545 if (dwarf_split_debug_info && !force_direct)
4546 attr.dw_attr_val.val_entry
4547 = add_addr_table_entry (lbl_id, ate_kind_label);
4548 else
4549 attr.dw_attr_val.val_entry = NULL;
4550 add_dwarf_attr (die, &attr);
4551
4552 attr.dw_attr = DW_AT_high_pc;
4553 if (dwarf_version < 4)
4554 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4555 else
4556 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4557 lbl_id = xstrdup (lbl_high);
4558 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4559 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4560 && dwarf_split_debug_info && !force_direct)
4561 attr.dw_attr_val.val_entry
4562 = add_addr_table_entry (lbl_id, ate_kind_label);
4563 else
4564 attr.dw_attr_val.val_entry = NULL;
4565 add_dwarf_attr (die, &attr);
4566 }
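
/* Note that for DWARF 2/3 the DW_AT_high_pc value stays an address label,
   while the dw_val_class_high_pc class used for DWARF 4 and later lets the
   output code emit it as a length, i.e. an offset from DW_AT_low_pc, as the
   newer standards permit.  */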
4567
4568 /* Hash and equality functions for debug_str_hash. */
4569
4570 hashval_t
4571 indirect_string_hasher::hash (indirect_string_node *x)
4572 {
4573 return htab_hash_string (x->str);
4574 }
4575
4576 bool
4577 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4578 {
4579 return strcmp (x1->str, x2) == 0;
4580 }
4581
4582 /* Add STR to the given string hash table. */
4583
4584 static struct indirect_string_node *
4585 find_AT_string_in_table (const char *str,
4586 hash_table<indirect_string_hasher> *table)
4587 {
4588 struct indirect_string_node *node;
4589
4590 indirect_string_node **slot
4591 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4592 if (*slot == NULL)
4593 {
4594 node = ggc_cleared_alloc<indirect_string_node> ();
4595 node->str = ggc_strdup (str);
4596 *slot = node;
4597 }
4598 else
4599 node = *slot;
4600
4601 node->refcount++;
4602 return node;
4603 }
4604
4605 /* Add STR to the indirect string hash table. */
4606
4607 static struct indirect_string_node *
4608 find_AT_string (const char *str)
4609 {
4610 if (! debug_str_hash)
4611 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4612
4613 return find_AT_string_in_table (str, debug_str_hash);
4614 }
4615
4616 /* Add a string attribute value to a DIE. */
4617
4618 static inline void
4619 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4620 {
4621 dw_attr_node attr;
4622 struct indirect_string_node *node;
4623
4624 node = find_AT_string (str);
4625
4626 attr.dw_attr = attr_kind;
4627 attr.dw_attr_val.val_class = dw_val_class_str;
4628 attr.dw_attr_val.val_entry = NULL;
4629 attr.dw_attr_val.v.val_str = node;
4630 add_dwarf_attr (die, &attr);
4631 }
4632
4633 static inline const char *
4634 AT_string (dw_attr_node *a)
4635 {
4636 gcc_assert (a && AT_class (a) == dw_val_class_str);
4637 return a->dw_attr_val.v.val_str->str;
4638 }
4639
4640 /* Call this function directly to bypass AT_string_form's logic to put
4641 the string inline in the die. */
4642
4643 static void
4644 set_indirect_string (struct indirect_string_node *node)
4645 {
4646 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4647 /* Already indirect is a no op. */
4648 if (node->form == DW_FORM_strp
4649 || node->form == DW_FORM_line_strp
4650 || node->form == DW_FORM_GNU_str_index)
4651 {
4652 gcc_assert (node->label);
4653 return;
4654 }
4655 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4656 ++dw2_string_counter;
4657 node->label = xstrdup (label);
4658
4659 if (!dwarf_split_debug_info)
4660 {
4661 node->form = DW_FORM_strp;
4662 node->index = NOT_INDEXED;
4663 }
4664 else
4665 {
4666 node->form = DW_FORM_GNU_str_index;
4667 node->index = NO_INDEX_ASSIGNED;
4668 }
4669 }
4670
4671 /* A helper function for dwarf2out_finish, called to reset indirect
4672 string decisions done for early LTO dwarf output before fat object
4673 dwarf output. */
4674
4675 int
4676 reset_indirect_string (indirect_string_node **h, void *)
4677 {
4678 struct indirect_string_node *node = *h;
4679 if (node->form == DW_FORM_strp || node->form == DW_FORM_GNU_str_index)
4680 {
4681 free (node->label);
4682 node->label = NULL;
4683 node->form = (dwarf_form) 0;
4684 node->index = 0;
4685 }
4686 return 1;
4687 }
4688
4689 /* Find out whether a string should be output inline in DIE
4690 or out-of-line in .debug_str section. */
4691
4692 static enum dwarf_form
4693 find_string_form (struct indirect_string_node *node)
4694 {
4695 unsigned int len;
4696
4697 if (node->form)
4698 return node->form;
4699
4700 len = strlen (node->str) + 1;
4701
4702 /* If the string is shorter than or equal to the size of the reference, it is
4703 always better to put it inline. */
4704 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4705 return node->form = DW_FORM_string;
4706
4707 /* If we cannot expect the linker to merge strings in the .debug_str
4708 section, only put it into .debug_str if doing so pays off even within
4709 this single module. */
4710 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4711 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4712 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4713 return node->form = DW_FORM_string;
4714
4715 set_indirect_string (node);
4716
4717 return node->form;
4718 }
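
/* Worked example, assuming DWARF_OFFSET_SIZE == 4: the string "abc" has
   len == 4, so it stays inline as DW_FORM_string; a 40-byte string referenced
   from several DIEs is cheaper out of line and becomes an indirect string
   (DW_FORM_strp, or DW_FORM_GNU_str_index under -gsplit-dwarf).  */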
4719
4720 /* Find out whether the string referenced from the attribute should be
4721 output inline in DIE or out-of-line in .debug_str section. */
4722
4723 static enum dwarf_form
4724 AT_string_form (dw_attr_node *a)
4725 {
4726 gcc_assert (a && AT_class (a) == dw_val_class_str);
4727 return find_string_form (a->dw_attr_val.v.val_str);
4728 }
4729
4730 /* Add a DIE reference attribute value to a DIE. */
4731
4732 static inline void
4733 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4734 {
4735 dw_attr_node attr;
4736 gcc_checking_assert (targ_die != NULL);
4737
4738 /* With LTO we can end up trying to reference something we didn't create
4739 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4740 if (targ_die == NULL)
4741 return;
4742
4743 attr.dw_attr = attr_kind;
4744 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4745 attr.dw_attr_val.val_entry = NULL;
4746 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4747 attr.dw_attr_val.v.val_die_ref.external = 0;
4748 add_dwarf_attr (die, &attr);
4749 }
4750
4751 /* Change DIE reference REF to point to NEW_DIE instead. */
4752
4753 static inline void
4754 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4755 {
4756 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4757 ref->dw_attr_val.v.val_die_ref.die = new_die;
4758 ref->dw_attr_val.v.val_die_ref.external = 0;
4759 }
4760
4761 /* Add an AT_specification attribute to a DIE, and also make the back
4762 pointer from the specification to the definition. */
4763
4764 static inline void
4765 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4766 {
4767 add_AT_die_ref (die, DW_AT_specification, targ_die);
4768 gcc_assert (!targ_die->die_definition);
4769 targ_die->die_definition = die;
4770 }
4771
4772 static inline dw_die_ref
4773 AT_ref (dw_attr_node *a)
4774 {
4775 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4776 return a->dw_attr_val.v.val_die_ref.die;
4777 }
4778
4779 static inline int
4780 AT_ref_external (dw_attr_node *a)
4781 {
4782 if (a && AT_class (a) == dw_val_class_die_ref)
4783 return a->dw_attr_val.v.val_die_ref.external;
4784
4785 return 0;
4786 }
4787
4788 static inline void
4789 set_AT_ref_external (dw_attr_node *a, int i)
4790 {
4791 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4792 a->dw_attr_val.v.val_die_ref.external = i;
4793 }
4794
4795 /* Add an FDE reference attribute value to a DIE. */
4796
4797 static inline void
4798 add_AT_fde_ref (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int targ_fde)
4799 {
4800 dw_attr_node attr;
4801
4802 attr.dw_attr = attr_kind;
4803 attr.dw_attr_val.val_class = dw_val_class_fde_ref;
4804 attr.dw_attr_val.val_entry = NULL;
4805 attr.dw_attr_val.v.val_fde_index = targ_fde;
4806 add_dwarf_attr (die, &attr);
4807 }
4808
4809 /* Add a location description attribute value to a DIE. */
4810
4811 static inline void
4812 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4813 {
4814 dw_attr_node attr;
4815
4816 attr.dw_attr = attr_kind;
4817 attr.dw_attr_val.val_class = dw_val_class_loc;
4818 attr.dw_attr_val.val_entry = NULL;
4819 attr.dw_attr_val.v.val_loc = loc;
4820 add_dwarf_attr (die, &attr);
4821 }
4822
4823 static inline dw_loc_descr_ref
4824 AT_loc (dw_attr_node *a)
4825 {
4826 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4827 return a->dw_attr_val.v.val_loc;
4828 }
4829
4830 static inline void
4831 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4832 {
4833 dw_attr_node attr;
4834
4835 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4836 return;
4837
4838 attr.dw_attr = attr_kind;
4839 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4840 attr.dw_attr_val.val_entry = NULL;
4841 attr.dw_attr_val.v.val_loc_list = loc_list;
4842 add_dwarf_attr (die, &attr);
4843 have_location_lists = true;
4844 }
4845
4846 static inline dw_loc_list_ref
4847 AT_loc_list (dw_attr_node *a)
4848 {
4849 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4850 return a->dw_attr_val.v.val_loc_list;
4851 }
4852
4853 /* Add a view list attribute to DIE. It must have a DW_AT_location
4854 attribute, because the view list complements the location list. */
4855
4856 static inline void
4857 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4858 {
4859 dw_attr_node attr;
4860
4861 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4862 return;
4863
4864 attr.dw_attr = attr_kind;
4865 attr.dw_attr_val.val_class = dw_val_class_view_list;
4866 attr.dw_attr_val.val_entry = NULL;
4867 attr.dw_attr_val.v.val_view_list = die;
4868 add_dwarf_attr (die, &attr);
4869 gcc_checking_assert (get_AT (die, DW_AT_location));
4870 gcc_assert (have_location_lists);
4871 }
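
/* Note: the checking code in AT_loc_list_ptr and
   view_list_to_loc_list_val_node assumes the view-list attribute sits
   immediately after the DW_AT_location attribute it complements (it inspects
   the following attribute slot), so the two additions must stay adjacent.  */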
4872
4873 /* Return a pointer to the location list referenced by the attribute.
4874 If the named attribute is a view list, look up the corresponding
4875 DW_AT_location attribute and return its location list. */
4876
4877 static inline dw_loc_list_ref *
4878 AT_loc_list_ptr (dw_attr_node *a)
4879 {
4880 gcc_assert (a);
4881 switch (AT_class (a))
4882 {
4883 case dw_val_class_loc_list:
4884 return &a->dw_attr_val.v.val_loc_list;
4885 case dw_val_class_view_list:
4886 {
4887 dw_attr_node *l;
4888 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4889 if (!l)
4890 return NULL;
4891 gcc_checking_assert (l + 1 == a);
4892 return AT_loc_list_ptr (l);
4893 }
4894 default:
4895 gcc_unreachable ();
4896 }
4897 }
4898
4899 /* Return the location attribute value associated with a view list
4900 attribute value. */
4901
4902 static inline dw_val_node *
4903 view_list_to_loc_list_val_node (dw_val_node *val)
4904 {
4905 gcc_assert (val->val_class == dw_val_class_view_list);
4906 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4907 if (!loc)
4908 return NULL;
4909 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4910 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4911 return &loc->dw_attr_val;
4912 }
4913
4914 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4915 {
4916 static hashval_t hash (addr_table_entry *);
4917 static bool equal (addr_table_entry *, addr_table_entry *);
4918 };
4919
4920 /* Table of entries into the .debug_addr section. */
4921
4922 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4923
4924 /* Hash an address_table_entry. */
4925
4926 hashval_t
4927 addr_hasher::hash (addr_table_entry *a)
4928 {
4929 inchash::hash hstate;
4930 switch (a->kind)
4931 {
4932 case ate_kind_rtx:
4933 hstate.add_int (0);
4934 break;
4935 case ate_kind_rtx_dtprel:
4936 hstate.add_int (1);
4937 break;
4938 case ate_kind_label:
4939 return htab_hash_string (a->addr.label);
4940 default:
4941 gcc_unreachable ();
4942 }
4943 inchash::add_rtx (a->addr.rtl, hstate);
4944 return hstate.end ();
4945 }
4946
4947 /* Determine equality for two address_table_entries. */
4948
4949 bool
4950 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4951 {
4952 if (a1->kind != a2->kind)
4953 return 0;
4954 switch (a1->kind)
4955 {
4956 case ate_kind_rtx:
4957 case ate_kind_rtx_dtprel:
4958 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4959 case ate_kind_label:
4960 return strcmp (a1->addr.label, a2->addr.label) == 0;
4961 default:
4962 gcc_unreachable ();
4963 }
4964 }
4965
4966 /* Initialize an addr_table_entry. */
4967
4968 void
4969 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4970 {
4971 e->kind = kind;
4972 switch (kind)
4973 {
4974 case ate_kind_rtx:
4975 case ate_kind_rtx_dtprel:
4976 e->addr.rtl = (rtx) addr;
4977 break;
4978 case ate_kind_label:
4979 e->addr.label = (char *) addr;
4980 break;
4981 }
4982 e->refcount = 0;
4983 e->index = NO_INDEX_ASSIGNED;
4984 }
4985
4986 /* Add an address table entry for ADDR to the table. Defer setting an
4987 index until output time. */
4988
4989 static addr_table_entry *
4990 add_addr_table_entry (void *addr, enum ate_kind kind)
4991 {
4992 addr_table_entry *node;
4993 addr_table_entry finder;
4994
4995 gcc_assert (dwarf_split_debug_info);
4996 if (! addr_index_table)
4997 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
4998 init_addr_table_entry (&finder, kind, addr);
4999 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5000
5001 if (*slot == HTAB_EMPTY_ENTRY)
5002 {
5003 node = ggc_cleared_alloc<addr_table_entry> ();
5004 init_addr_table_entry (node, kind, addr);
5005 *slot = node;
5006 }
5007 else
5008 node = *slot;
5009
5010 node->refcount++;
5011 return node;
5012 }
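
/* Entries are deduplicated through the hash table: two calls with an equal
   address (or identical label string) return the same node with its refcount
   bumped, so a later remove_addr_table_entry on one reference leaves the
   entry alive for the other.  */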
5013
5014 /* Remove an entry from the addr table by decrementing its refcount.
5015 Strictly, decrementing the refcount would be enough, but the
5016 assertion that the entry is actually in the table has found
5017 bugs. */
5018
5019 static void
5020 remove_addr_table_entry (addr_table_entry *entry)
5021 {
5022 gcc_assert (dwarf_split_debug_info && addr_index_table);
5023 /* After an index is assigned, the table is frozen. */
5024 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5025 entry->refcount--;
5026 }
5027
5028 /* Given a location list, remove all addresses it refers to from the
5029 address_table. */
5030
5031 static void
5032 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5033 {
5034 for (; descr; descr = descr->dw_loc_next)
5035 if (descr->dw_loc_oprnd1.val_entry != NULL)
5036 {
5037 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5038 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5039 }
5040 }
5041
5042 /* A helper function for dwarf2out_finish called through
5043 htab_traverse. Assign an addr_table_entry its index. All entries
5044 must be collected into the table when this function is called,
5045 because the indexing code relies on htab_traverse to traverse nodes
5046 in the same order for each run. */
5047
5048 int
5049 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5050 {
5051 addr_table_entry *node = *h;
5052
5053 /* Don't index unreferenced nodes. */
5054 if (node->refcount == 0)
5055 return 1;
5056
5057 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5058 node->index = *index;
5059 *index += 1;
5060
5061 return 1;
5062 }
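
/* Illustration only (not part of GCC proper): a minimal sketch of the
   .debug_addr table lifecycle under -gsplit-dwarf, using the helpers
   above.  Entries are created and reference counted while DIEs are
   built; indices are handed out in a single pass at output time.

     // While building DIEs (dwarf_split_debug_info must be set):
     addr_table_entry *e = add_addr_table_entry (some_rtl, ate_kind_rtx);
     // ... and if the referencing attribute is later dropped:
     remove_addr_table_entry (e);

     // At output time, assign indices to every still-referenced entry:
     unsigned int index = 0;
     addr_index_table->traverse_noresize
       <unsigned int *, index_addr_table_entry> (&index);

   SOME_RTL stands for whatever address rtx the caller has; the traverse
   call sketches how dwarf2out_finish is expected to drive
   index_addr_table_entry.  */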
5063
5064 /* Add an address constant attribute value to a DIE. When using
5065 dwarf_split_debug_info, address attributes in dies destined for the
5066 final executable should be direct references--setting the parameter
5067 force_direct ensures this behavior. */
5068
5069 static inline void
5070 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5071 bool force_direct)
5072 {
5073 dw_attr_node attr;
5074
5075 attr.dw_attr = attr_kind;
5076 attr.dw_attr_val.val_class = dw_val_class_addr;
5077 attr.dw_attr_val.v.val_addr = addr;
5078 if (dwarf_split_debug_info && !force_direct)
5079 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5080 else
5081 attr.dw_attr_val.val_entry = NULL;
5082 add_dwarf_attr (die, &attr);
5083 }
5084
5085 /* Get the RTX from an address DIE attribute. */
5086
5087 static inline rtx
5088 AT_addr (dw_attr_node *a)
5089 {
5090 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5091 return a->dw_attr_val.v.val_addr;
5092 }
5093
5094 /* Add a file attribute value to a DIE. */
5095
5096 static inline void
5097 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5098 struct dwarf_file_data *fd)
5099 {
5100 dw_attr_node attr;
5101
5102 attr.dw_attr = attr_kind;
5103 attr.dw_attr_val.val_class = dw_val_class_file;
5104 attr.dw_attr_val.val_entry = NULL;
5105 attr.dw_attr_val.v.val_file = fd;
5106 add_dwarf_attr (die, &attr);
5107 }
5108
5109 /* Get the dwarf_file_data from a file DIE attribute. */
5110
5111 static inline struct dwarf_file_data *
5112 AT_file (dw_attr_node *a)
5113 {
5114 gcc_assert (a && (AT_class (a) == dw_val_class_file
5115 || AT_class (a) == dw_val_class_file_implicit));
5116 return a->dw_attr_val.v.val_file;
5117 }
5118
5119 /* Add a vms delta attribute value to a DIE. */
5120
5121 static inline void
5122 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
5123 const char *lbl1, const char *lbl2)
5124 {
5125 dw_attr_node attr;
5126
5127 attr.dw_attr = attr_kind;
5128 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
5129 attr.dw_attr_val.val_entry = NULL;
5130 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
5131 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
5132 add_dwarf_attr (die, &attr);
5133 }
5134
5135 /* Add a symbolic view identifier attribute value to a DIE. */
5136
5137 static inline void
5138 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5139 const char *view_label)
5140 {
5141 dw_attr_node attr;
5142
5143 attr.dw_attr = attr_kind;
5144 attr.dw_attr_val.val_class = dw_val_class_symview;
5145 attr.dw_attr_val.val_entry = NULL;
5146 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5147 add_dwarf_attr (die, &attr);
5148 }
5149
5150 /* Add a label identifier attribute value to a DIE. */
5151
5152 static inline void
5153 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5154 const char *lbl_id)
5155 {
5156 dw_attr_node attr;
5157
5158 attr.dw_attr = attr_kind;
5159 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5160 attr.dw_attr_val.val_entry = NULL;
5161 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5162 if (dwarf_split_debug_info)
5163 attr.dw_attr_val.val_entry
5164 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5165 ate_kind_label);
5166 add_dwarf_attr (die, &attr);
5167 }
5168
5169 /* Add a section offset attribute value to a DIE, an offset into the
5170 debug_line section. */
5171
5172 static inline void
5173 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5174 const char *label)
5175 {
5176 dw_attr_node attr;
5177
5178 attr.dw_attr = attr_kind;
5179 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5180 attr.dw_attr_val.val_entry = NULL;
5181 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5182 add_dwarf_attr (die, &attr);
5183 }
5184
5185 /* Add a section offset attribute value to a DIE, an offset into the
5186 debug_loclists section. */
5187
5188 static inline void
5189 add_AT_loclistsptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5190 const char *label)
5191 {
5192 dw_attr_node attr;
5193
5194 attr.dw_attr = attr_kind;
5195 attr.dw_attr_val.val_class = dw_val_class_loclistsptr;
5196 attr.dw_attr_val.val_entry = NULL;
5197 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5198 add_dwarf_attr (die, &attr);
5199 }
5200
5201 /* Add a section offset attribute value to a DIE, an offset into the
5202 debug_macinfo section. */
5203
5204 static inline void
5205 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5206 const char *label)
5207 {
5208 dw_attr_node attr;
5209
5210 attr.dw_attr = attr_kind;
5211 attr.dw_attr_val.val_class = dw_val_class_macptr;
5212 attr.dw_attr_val.val_entry = NULL;
5213 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5214 add_dwarf_attr (die, &attr);
5215 }
5216
5217 /* Add an offset attribute value to a DIE. */
5218
5219 static inline void
5220 add_AT_offset (dw_die_ref die, enum dwarf_attribute attr_kind,
5221 unsigned HOST_WIDE_INT offset)
5222 {
5223 dw_attr_node attr;
5224
5225 attr.dw_attr = attr_kind;
5226 attr.dw_attr_val.val_class = dw_val_class_offset;
5227 attr.dw_attr_val.val_entry = NULL;
5228 attr.dw_attr_val.v.val_offset = offset;
5229 add_dwarf_attr (die, &attr);
5230 }
5231
5232 /* Add a range_list attribute value to a DIE. When using
5233 dwarf_split_debug_info, address attributes in dies destined for the
5234 final executable should be direct references--setting the parameter
5235 force_direct ensures this behavior. */
5236
5237 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5238 #define RELOCATED_OFFSET (NULL)
5239
5240 static void
5241 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5242 long unsigned int offset, bool force_direct)
5243 {
5244 dw_attr_node attr;
5245
5246 attr.dw_attr = attr_kind;
5247 attr.dw_attr_val.val_class = dw_val_class_range_list;
5248 /* For the range_list attribute, use val_entry to store whether the
5249 offset should follow split-debug-info or normal semantics. This
5250 value is read in output_range_list_offset. */
5251 if (dwarf_split_debug_info && !force_direct)
5252 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5253 else
5254 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5255 attr.dw_attr_val.v.val_offset = offset;
5256 add_dwarf_attr (die, &attr);
5257 }
5258
5259 /* Return the start label of a delta attribute. */
5260
5261 static inline const char *
5262 AT_vms_delta1 (dw_attr_node *a)
5263 {
5264 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5265 return a->dw_attr_val.v.val_vms_delta.lbl1;
5266 }
5267
5268 /* Return the end label of a delta attribute. */
5269
5270 static inline const char *
5271 AT_vms_delta2 (dw_attr_node *a)
5272 {
5273 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5274 return a->dw_attr_val.v.val_vms_delta.lbl2;
5275 }
5276
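/* Get the label string from a label-class DIE attribute (label,
   lineptr, macptr, loclistsptr or high_pc). */
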
5277 static inline const char *
5278 AT_lbl (dw_attr_node *a)
5279 {
5280 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5281 || AT_class (a) == dw_val_class_lineptr
5282 || AT_class (a) == dw_val_class_macptr
5283 || AT_class (a) == dw_val_class_loclistsptr
5284 || AT_class (a) == dw_val_class_high_pc));
5285 return a->dw_attr_val.v.val_lbl_id;
5286 }
5287
5288 /* Get the attribute of type attr_kind. */
5289
5290 static dw_attr_node *
5291 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5292 {
5293 dw_attr_node *a;
5294 unsigned ix;
5295 dw_die_ref spec = NULL;
5296
5297 if (! die)
5298 return NULL;
5299
5300 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5301 if (a->dw_attr == attr_kind)
5302 return a;
5303 else if (a->dw_attr == DW_AT_specification
5304 || a->dw_attr == DW_AT_abstract_origin)
5305 spec = AT_ref (a);
5306
5307 if (spec)
5308 return get_AT (spec, attr_kind);
5309
5310 return NULL;
5311 }
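
/* Illustration only (not part of GCC proper): get_AT transparently
   follows DW_AT_specification and DW_AT_abstract_origin, so an
   attribute present only on a declaration DIE is still found through
   the DIE of the definition.  A hypothetical sketch:

     dw_die_ref defn = ...;  // definition DIE with DW_AT_specification
     // Even if DEFN itself carries no DW_AT_name, this returns the
     // name attribute of the declaration DIE the specification names:
     dw_attr_node *name = get_AT (defn, DW_AT_name);

   The get_AT_* wrappers below all inherit this behavior.  */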
5312
5313 /* Returns the parent of the declaration of DIE. */
5314
5315 static dw_die_ref
5316 get_die_parent (dw_die_ref die)
5317 {
5318 dw_die_ref t;
5319
5320 if (!die)
5321 return NULL;
5322
5323 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5324 || (t = get_AT_ref (die, DW_AT_specification)))
5325 die = t;
5326
5327 return die->die_parent;
5328 }
5329
5330 /* Return the "low pc" attribute value, typically associated with a subprogram
5331 DIE. Return null if the "low pc" attribute is either not present, or if it
5332 cannot be represented as an assembler label identifier. */
5333
5334 static inline const char *
5335 get_AT_low_pc (dw_die_ref die)
5336 {
5337 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5338
5339 return a ? AT_lbl (a) : NULL;
5340 }
5341
5342 /* Return the "high pc" attribute value, typically associated with a subprogram
5343 DIE. Return null if the "high pc" attribute is either not present, or if it
5344 cannot be represented as an assembler label identifier. */
5345
5346 static inline const char *
5347 get_AT_hi_pc (dw_die_ref die)
5348 {
5349 dw_attr_node *a = get_AT (die, DW_AT_high_pc);
5350
5351 return a ? AT_lbl (a) : NULL;
5352 }
5353
5354 /* Return the value of the string attribute designated by ATTR_KIND, or
5355 NULL if it is not present. */
5356
5357 static inline const char *
5358 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5359 {
5360 dw_attr_node *a = get_AT (die, attr_kind);
5361
5362 return a ? AT_string (a) : NULL;
5363 }
5364
5365 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5366 if it is not present. */
5367
5368 static inline int
5369 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5370 {
5371 dw_attr_node *a = get_AT (die, attr_kind);
5372
5373 return a ? AT_flag (a) : 0;
5374 }
5375
5376 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5377 if it is not present. */
5378
5379 static inline unsigned
5380 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5381 {
5382 dw_attr_node *a = get_AT (die, attr_kind);
5383
5384 return a ? AT_unsigned (a) : 0;
5385 }
5386
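/* Return the value of the DIE reference attribute designated by
   ATTR_KIND, or NULL if it is not present. */
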
5387 static inline dw_die_ref
5388 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5389 {
5390 dw_attr_node *a = get_AT (die, attr_kind);
5391
5392 return a ? AT_ref (a) : NULL;
5393 }
5394
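/* Return the value of the file attribute designated by ATTR_KIND, or
   NULL if it is not present. */
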
5395 static inline struct dwarf_file_data *
5396 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5397 {
5398 dw_attr_node *a = get_AT (die, attr_kind);
5399
5400 return a ? AT_file (a) : NULL;
5401 }
5402
5403 /* Return TRUE if the language is C++. */
5404
5405 static inline bool
5406 is_cxx (void)
5407 {
5408 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5409
5410 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5411 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5412 }
5413
5414 /* Return TRUE if DECL was created by the C++ frontend. */
5415
5416 static bool
5417 is_cxx (const_tree decl)
5418 {
5419 if (in_lto_p)
5420 {
5421 const_tree context = get_ultimate_context (decl);
5422 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5423 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5424 }
5425 return is_cxx ();
5426 }
5427
5428 /* Return TRUE if the language is Fortran. */
5429
5430 static inline bool
5431 is_fortran (void)
5432 {
5433 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5434
5435 return (lang == DW_LANG_Fortran77
5436 || lang == DW_LANG_Fortran90
5437 || lang == DW_LANG_Fortran95
5438 || lang == DW_LANG_Fortran03
5439 || lang == DW_LANG_Fortran08);
5440 }
5441
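/* Return TRUE if DECL was created by the Fortran frontend. */
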
5442 static inline bool
5443 is_fortran (const_tree decl)
5444 {
5445 if (in_lto_p)
5446 {
5447 const_tree context = get_ultimate_context (decl);
5448 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5449 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5450 "GNU Fortran", 11) == 0
5451 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5452 "GNU F77") == 0);
5453 }
5454 return is_fortran ();
5455 }
5456
5457 /* Return TRUE if the language is Ada. */
5458
5459 static inline bool
5460 is_ada (void)
5461 {
5462 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5463
5464 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5465 }
5466
5467 /* Remove the specified attribute if present. Return TRUE if removal
5468 was successful. */
5469
5470 static bool
5471 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5472 {
5473 dw_attr_node *a;
5474 unsigned ix;
5475
5476 if (! die)
5477 return false;
5478
5479 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5480 if (a->dw_attr == attr_kind)
5481 {
5482 if (AT_class (a) == dw_val_class_str)
5483 if (a->dw_attr_val.v.val_str->refcount)
5484 a->dw_attr_val.v.val_str->refcount--;
5485
5486 /* vec::ordered_remove should help reduce the number of abbrevs
5487 that are needed. */
5488 die->die_attr->ordered_remove (ix);
5489 return true;
5490 }
5491 return false;
5492 }
5493
5494 /* Remove CHILD from its parent. PREV must have the property that
5495 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5496
5497 static void
5498 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5499 {
5500 gcc_assert (child->die_parent == prev->die_parent);
5501 gcc_assert (prev->die_sib == child);
5502 if (prev == child)
5503 {
5504 gcc_assert (child->die_parent->die_child == child);
5505 prev = NULL;
5506 }
5507 else
5508 prev->die_sib = child->die_sib;
5509 if (child->die_parent->die_child == child)
5510 child->die_parent->die_child = prev;
5511 child->die_sib = NULL;
5512 }
5513
5514 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5515 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5516
5517 static void
5518 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5519 {
5520 dw_die_ref parent = old_child->die_parent;
5521
5522 gcc_assert (parent == prev->die_parent);
5523 gcc_assert (prev->die_sib == old_child);
5524
5525 new_child->die_parent = parent;
5526 if (prev == old_child)
5527 {
5528 gcc_assert (parent->die_child == old_child);
5529 new_child->die_sib = new_child;
5530 }
5531 else
5532 {
5533 prev->die_sib = new_child;
5534 new_child->die_sib = old_child->die_sib;
5535 }
5536 if (old_child->die_parent->die_child == old_child)
5537 old_child->die_parent->die_child = new_child;
5538 old_child->die_sib = NULL;
5539 }
5540
5541 /* Move all children from OLD_PARENT to NEW_PARENT. */
5542
5543 static void
5544 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5545 {
5546 dw_die_ref c;
5547 new_parent->die_child = old_parent->die_child;
5548 old_parent->die_child = NULL;
5549 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5550 }
5551
5552 /* Remove all child DIEs whose die_tag is TAG. Do nothing if no child
5553 matches TAG. */
5554
5555 static void
5556 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5557 {
5558 dw_die_ref c;
5559
5560 c = die->die_child;
5561 if (c) do {
5562 dw_die_ref prev = c;
5563 c = c->die_sib;
5564 while (c->die_tag == tag)
5565 {
5566 remove_child_with_prev (c, prev);
5567 c->die_parent = NULL;
5568 /* Might have removed every child. */
5569 if (die->die_child == NULL)
5570 return;
5571 c = prev->die_sib;
5572 }
5573 } while (c != die->die_child);
5574 }
5575
5576 /* Add a CHILD_DIE as the last child of DIE. */
5577
5578 static void
5579 add_child_die (dw_die_ref die, dw_die_ref child_die)
5580 {
5581 /* FIXME this should probably be an assert. */
5582 if (! die || ! child_die)
5583 return;
5584 gcc_assert (die != child_die);
5585
5586 child_die->die_parent = die;
5587 if (die->die_child)
5588 {
5589 child_die->die_sib = die->die_child->die_sib;
5590 die->die_child->die_sib = child_die;
5591 }
5592 else
5593 child_die->die_sib = child_die;
5594 die->die_child = child_die;
5595 }
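
/* Illustration only (not part of GCC proper): DIE children form a
   circular, singly linked list threaded through die_sib, with
   die_child pointing at the *last* child.  Adding A, B and C (in that
   order) to a parent P with add_child_die yields

     P->die_child == C
     A->die_sib == B, B->die_sib == C, C->die_sib == A

   so the first child is always P->die_child->die_sib (where
   FOR_EACH_CHILD starts) and appending a new last child is O(1).  */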
5596
5597 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5598
5599 static void
5600 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5601 dw_die_ref after_die)
5602 {
5603 gcc_assert (die
5604 && child_die
5605 && after_die
5606 && die->die_child
5607 && die != child_die);
5608
5609 child_die->die_parent = die;
5610 child_die->die_sib = after_die->die_sib;
5611 after_die->die_sib = child_die;
5612 if (die->die_child == after_die)
5613 die->die_child = child_die;
5614 }
5615
5616 /* Unassociate CHILD from its parent, and make its parent be
5617 NEW_PARENT. */
5618
5619 static void
5620 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5621 {
5622 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5623 if (p->die_sib == child)
5624 {
5625 remove_child_with_prev (child, p);
5626 break;
5627 }
5628 add_child_die (new_parent, child);
5629 }
5630
5631 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5632 is the specification, to the end of PARENT's list of children.
5633 This is done by removing and re-adding it. */
5634
5635 static void
5636 splice_child_die (dw_die_ref parent, dw_die_ref child)
5637 {
5638 /* We want the declaration DIE from inside the class, not the
5639 specification DIE at toplevel. */
5640 if (child->die_parent != parent)
5641 {
5642 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5643
5644 if (tmp)
5645 child = tmp;
5646 }
5647
5648 gcc_assert (child->die_parent == parent
5649 || (child->die_parent
5650 == get_AT_ref (parent, DW_AT_specification)));
5651
5652 reparent_child (child, parent);
5653 }
5654
5655 /* Create and return a new die with TAG_VALUE as tag. */
5656
5657 static inline dw_die_ref
5658 new_die_raw (enum dwarf_tag tag_value)
5659 {
5660 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5661 die->die_tag = tag_value;
5662 return die;
5663 }
5664
5665 /* Create and return a new die with a parent of PARENT_DIE. If
5666 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5667 associated tree T must be supplied to determine parenthood
5668 later. */
5669
5670 static inline dw_die_ref
5671 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5672 {
5673 dw_die_ref die = new_die_raw (tag_value);
5674
5675 if (parent_die != NULL)
5676 add_child_die (parent_die, die);
5677 else
5678 {
5679 limbo_die_node *limbo_node;
5680
5681 /* No DIEs created after early dwarf should end up in limbo,
5682 because the limbo list should not persist past LTO
5683 streaming. */
5684 if (tag_value != DW_TAG_compile_unit
5685 /* These are allowed because they're generated while
5686 breaking out COMDAT units late. */
5687 && tag_value != DW_TAG_type_unit
5688 && tag_value != DW_TAG_skeleton_unit
5689 && !early_dwarf
5690 /* Allow nested functions to live in limbo because they will
5691 only temporarily live there, as decls_for_scope will fix
5692 them up. */
5693 && (TREE_CODE (t) != FUNCTION_DECL
5694 || !decl_function_context (t))
5695 /* Same as nested functions above but for types. Types that
5696 are local to a function will be fixed in
5697 decls_for_scope. */
5698 && (!RECORD_OR_UNION_TYPE_P (t)
5699 || !TYPE_CONTEXT (t)
5700 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5701 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5702 especially in the ltrans stage, but once we implement LTO
5703 dwarf streaming, we should remove this exception. */
5704 && !in_lto_p)
5705 {
5706 fprintf (stderr, "symbol ended up in limbo too late:");
5707 debug_generic_stmt (t);
5708 gcc_unreachable ();
5709 }
5710
5711 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5712 limbo_node->die = die;
5713 limbo_node->created_for = t;
5714 limbo_node->next = limbo_die_list;
5715 limbo_die_list = limbo_node;
5716 }
5717
5718 return die;
5719 }
5720
5721 /* Return the DIE associated with the given type specifier. */
5722
5723 static inline dw_die_ref
5724 lookup_type_die (tree type)
5725 {
5726 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5727 if (die && die->removed)
5728 {
5729 TYPE_SYMTAB_DIE (type) = NULL;
5730 return NULL;
5731 }
5732 return die;
5733 }
5734
5735 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5736 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5737 anonymous type instead of the one of the naming typedef. */
5738
5739 static inline dw_die_ref
5740 strip_naming_typedef (tree type, dw_die_ref type_die)
5741 {
5742 if (type
5743 && TREE_CODE (type) == RECORD_TYPE
5744 && type_die
5745 && type_die->die_tag == DW_TAG_typedef
5746 && is_naming_typedef_decl (TYPE_NAME (type)))
5747 type_die = get_AT_ref (type_die, DW_AT_type);
5748 return type_die;
5749 }
5750
5751 /* Like lookup_type_die, but if type is an anonymous type named by a
5752 typedef[1], return the DIE of the anonymous type instead of the one
5753 of the naming typedef. This is because in gen_typedef_die, we
5754 equated the anonymous struct named by the typedef with the DIE of
5755 the naming typedef. So by default, lookup_type_die on an anonymous
5756 struct yields the DIE of the naming typedef.
5757
5758 [1]: Read the comment of is_naming_typedef_decl to learn about what
5759 a naming typedef is. */
5760
5761 static inline dw_die_ref
5762 lookup_type_die_strip_naming_typedef (tree type)
5763 {
5764 dw_die_ref die = lookup_type_die (type);
5765 return strip_naming_typedef (type, die);
5766 }
5767
5768 /* Equate a DIE to a given type specifier. */
5769
5770 static inline void
5771 equate_type_number_to_die (tree type, dw_die_ref type_die)
5772 {
5773 TYPE_SYMTAB_DIE (type) = type_die;
5774 }
5775
5776 /* Returns a hash value for X (which really is a die_struct). */
5777
5778 inline hashval_t
5779 decl_die_hasher::hash (die_node *x)
5780 {
5781 return (hashval_t) x->decl_id;
5782 }
5783
5784 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5785
5786 inline bool
5787 decl_die_hasher::equal (die_node *x, tree y)
5788 {
5789 return (x->decl_id == DECL_UID (y));
5790 }
5791
5792 /* Return the DIE associated with a given declaration. */
5793
5794 static inline dw_die_ref
5795 lookup_decl_die (tree decl)
5796 {
5797 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5798 NO_INSERT);
5799 if (!die)
5800 return NULL;
5801 if ((*die)->removed)
5802 {
5803 decl_die_table->clear_slot (die);
5804 return NULL;
5805 }
5806 return *die;
5807 }
5808
5809
5810 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5811 style reference. Return true if we found one referring to a DIE for
5812 DECL, otherwise return false. */
5813
5814 static bool
5815 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5816 unsigned HOST_WIDE_INT *off)
5817 {
5818 dw_die_ref die;
5819
5820 if (flag_wpa && !decl_die_table)
5821 return false;
5822
5823 if (TREE_CODE (decl) == BLOCK)
5824 die = BLOCK_DIE (decl);
5825 else
5826 die = lookup_decl_die (decl);
5827 if (!die)
5828 return false;
5829
5830 /* During WPA stage we currently use DIEs to store the
5831 decl <-> label + offset map. That's quite inefficient but it
5832 works for now. */
5833 if (flag_wpa)
5834 {
5835 dw_die_ref ref = get_AT_ref (die, DW_AT_abstract_origin);
5836 if (!ref)
5837 {
5838 gcc_assert (die == comp_unit_die ());
5839 return false;
5840 }
5841 *off = ref->die_offset;
5842 *sym = ref->die_id.die_symbol;
5843 return true;
5844 }
5845
5846 /* Similar to get_ref_die_offset_label, but using the "correct"
5847 label. */
5848 *off = die->die_offset;
5849 while (die->die_parent)
5850 die = die->die_parent;
5851 /* For the containing CU DIE we compute a die_symbol in
5852 compute_comp_unit_symbol. */
5853 gcc_assert (die->die_tag == DW_TAG_compile_unit
5854 && die->die_id.die_symbol != NULL);
5855 *sym = die->die_id.die_symbol;
5856 return true;
5857 }
5858
5859 /* Add to DIE a reference of kind ATTR_KIND to the DIE at SYMBOL + OFFSET. */
5860
5861 static void
5862 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5863 const char *symbol, HOST_WIDE_INT offset)
5864 {
5865 /* Create a fake DIE that contains the reference. Don't use
5866 new_die because we don't want to end up in the limbo list. */
5867 dw_die_ref ref = new_die_raw (die->die_tag);
5868 ref->die_id.die_symbol = IDENTIFIER_POINTER (get_identifier (symbol));
5869 ref->die_offset = offset;
5870 ref->with_offset = 1;
5871 add_AT_die_ref (die, attr_kind, ref);
5872 }
5873
5874 /* Create a DIE for DECL if required and add a reference to a DIE
5875 at SYMBOL + OFFSET which contains attributes dumped early. */
5876
5877 static void
5878 dwarf2out_register_external_die (tree decl, const char *sym,
5879 unsigned HOST_WIDE_INT off)
5880 {
5881 if (debug_info_level == DINFO_LEVEL_NONE)
5882 return;
5883
5884 if (flag_wpa && !decl_die_table)
5885 decl_die_table = hash_table<decl_die_hasher>::create_ggc (1000);
5886
5887 dw_die_ref die
5888 = TREE_CODE (decl) == BLOCK ? BLOCK_DIE (decl) : lookup_decl_die (decl);
5889 gcc_assert (!die);
5890
5891 tree ctx;
5892 dw_die_ref parent = NULL;
5893 /* Need to look up a DIE for the decl's context - the containing
5894 function or translation unit. */
5895 if (TREE_CODE (decl) == BLOCK)
5896 {
5897 ctx = BLOCK_SUPERCONTEXT (decl);
5898 /* ??? We do not output DIEs for all scopes thus skip as
5899 many DIEs as needed. */
5900 while (TREE_CODE (ctx) == BLOCK
5901 && !BLOCK_DIE (ctx))
5902 ctx = BLOCK_SUPERCONTEXT (ctx);
5903 }
5904 else
5905 ctx = DECL_CONTEXT (decl);
5906 /* Peel types in the context stack. */
5907 while (ctx && TYPE_P (ctx))
5908 ctx = TYPE_CONTEXT (ctx);
5909 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5910 if (debug_info_level <= DINFO_LEVEL_TERSE)
5911 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5912 ctx = DECL_CONTEXT (ctx);
5913 if (ctx)
5914 {
5915 if (TREE_CODE (ctx) == BLOCK)
5916 parent = BLOCK_DIE (ctx);
5917 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5918 /* Keep the 1:1 association during WPA. */
5919 && !flag_wpa)
5920 /* Otherwise all late annotations go to the main CU which
5921 imports the original CUs. */
5922 parent = comp_unit_die ();
5923 else if (TREE_CODE (ctx) == FUNCTION_DECL
5924 && TREE_CODE (decl) != PARM_DECL
5925 && TREE_CODE (decl) != BLOCK)
5926 /* Leave parent determination for function-local entities to when
5927 we process scope variables. */
5928 ;
5929 else
5930 parent = lookup_decl_die (ctx);
5931 }
5932 else
5933 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5934 Handle this case gracefully by attaching such entities to the CU DIE. */
5935 parent = comp_unit_die ();
5936 /* Create a DIE "stub". */
5937 switch (TREE_CODE (decl))
5938 {
5939 case TRANSLATION_UNIT_DECL:
5940 if (! flag_wpa)
5941 {
5942 die = comp_unit_die ();
5943 dw_die_ref import = new_die (DW_TAG_imported_unit, die, NULL_TREE);
5944 add_AT_external_die_ref (import, DW_AT_import, sym, off);
5945 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5946 to create a DIE for the original CUs. */
5947 return;
5948 }
5949 /* Keep the 1:1 association during WPA. */
5950 die = new_die (DW_TAG_compile_unit, NULL, decl);
5951 break;
5952 case NAMESPACE_DECL:
5953 if (is_fortran (decl))
5954 die = new_die (DW_TAG_module, parent, decl);
5955 else
5956 die = new_die (DW_TAG_namespace, parent, decl);
5957 break;
5958 case FUNCTION_DECL:
5959 die = new_die (DW_TAG_subprogram, parent, decl);
5960 break;
5961 case VAR_DECL:
5962 die = new_die (DW_TAG_variable, parent, decl);
5963 break;
5964 case RESULT_DECL:
5965 die = new_die (DW_TAG_variable, parent, decl);
5966 break;
5967 case PARM_DECL:
5968 die = new_die (DW_TAG_formal_parameter, parent, decl);
5969 break;
5970 case CONST_DECL:
5971 die = new_die (DW_TAG_constant, parent, decl);
5972 break;
5973 case LABEL_DECL:
5974 die = new_die (DW_TAG_label, parent, decl);
5975 break;
5976 case BLOCK:
5977 die = new_die (DW_TAG_lexical_block, parent, decl);
5978 break;
5979 default:
5980 gcc_unreachable ();
5981 }
5982 if (TREE_CODE (decl) == BLOCK)
5983 BLOCK_DIE (decl) = die;
5984 else
5985 equate_decl_number_to_die (decl, die);
5986
5987 /* Add a reference to the DIE providing early debug at SYM + OFF. */
5988 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
5989 }
5990
5991 /* Returns a hash value for X (which really is a var_loc_list). */
5992
5993 inline hashval_t
5994 decl_loc_hasher::hash (var_loc_list *x)
5995 {
5996 return (hashval_t) x->decl_id;
5997 }
5998
5999 /* Return nonzero if decl_id of var_loc_list X is the same as
6000 UID of decl *Y. */
6001
6002 inline bool
6003 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6004 {
6005 return (x->decl_id == DECL_UID (y));
6006 }
6007
6008 /* Return the var_loc list associated with a given declaration. */
6009
6010 static inline var_loc_list *
6011 lookup_decl_loc (const_tree decl)
6012 {
6013 if (!decl_loc_table)
6014 return NULL;
6015 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6016 }
6017
6018 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
6019
6020 inline hashval_t
6021 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6022 {
6023 return (hashval_t) x->decl_id;
6024 }
6025
6026 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6027 UID of decl *Y. */
6028
6029 inline bool
6030 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6031 {
6032 return (x->decl_id == DECL_UID (y));
6033 }
6034
6035 /* Equate a DIE to a particular declaration. */
6036
6037 static void
6038 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6039 {
6040 unsigned int decl_id = DECL_UID (decl);
6041
6042 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6043 decl_die->decl_id = decl_id;
6044 }
6045
6046 /* Return how many bits the PIECE EXPR_LIST covers. */
6047
6048 static HOST_WIDE_INT
6049 decl_piece_bitsize (rtx piece)
6050 {
6051 int ret = (int) GET_MODE (piece);
6052 if (ret)
6053 return ret;
6054 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6055 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6056 return INTVAL (XEXP (XEXP (piece, 0), 0));
6057 }
6058
6059 /* Return a pointer to the location note stored in the PIECE EXPR_LIST. */
6060
6061 static rtx *
6062 decl_piece_varloc_ptr (rtx piece)
6063 {
6064 if ((int) GET_MODE (piece))
6065 return &XEXP (piece, 0);
6066 else
6067 return &XEXP (XEXP (piece, 0), 1);
6068 }
6069
6070 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6071 NEXT is the chain of following piece nodes. */
6072
6073 static rtx_expr_list *
6074 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6075 {
6076 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6077 return alloc_EXPR_LIST (bitsize, loc_note, next);
6078 else
6079 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6080 GEN_INT (bitsize),
6081 loc_note), next);
6082 }
6083
6084 /* Return rtx that should be stored into loc field for
6085 LOC_NOTE and BITPOS/BITSIZE. */
6086
6087 static rtx
6088 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6089 HOST_WIDE_INT bitsize)
6090 {
6091 if (bitsize != -1)
6092 {
6093 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6094 if (bitpos != 0)
6095 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6096 }
6097 return loc_note;
6098 }
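
/* Illustration only (not part of GCC proper): the piece lists built by
   construct_piece_list encode each piece's size in bits either in the
   EXPR_LIST mode field (when the size fits below MAX_MACHINE_MODE) or,
   for larger sizes, in a (concat (const_int SIZE) NOTE) wrapper with a
   zero mode.  For a note describing bits [32, 96) of a variable the
   list looks roughly like

     (expr_list:<32> (nil)              ; 32 bits of padding, no note
        (expr_list:<64> NOTE (nil)))    ; 64-bit piece holding NOTE

   where <32> and <64> stand for the sizes smuggled into the mode field
   and recovered by decl_piece_bitsize.  */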
6099
6100 /* This function either modifies location piece list *DEST in
6101 place (if SRC and INNER are NULL), or copies location piece list
6102 *SRC to *DEST while modifying it. Location BITPOS is modified
6103 to contain LOC_NOTE; any pieces overlapping it are removed (or,
6104 when copying, not copied) and padding around it is added if needed.
6105 When modifying in place, DEST should point to EXPR_LIST where
6106 earlier pieces cover PIECE_BITPOS bits, when copying SRC points
6107 to the start of the whole list and INNER points to the EXPR_LIST
6108 where earlier pieces cover PIECE_BITPOS bits. */
6109
6110 static void
6111 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6112 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6113 HOST_WIDE_INT bitsize, rtx loc_note)
6114 {
6115 HOST_WIDE_INT diff;
6116 bool copy = inner != NULL;
6117
6118 if (copy)
6119 {
6120 /* First copy all nodes preceding the current bitpos. */
6121 while (src != inner)
6122 {
6123 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6124 decl_piece_bitsize (*src), NULL_RTX);
6125 dest = &XEXP (*dest, 1);
6126 src = &XEXP (*src, 1);
6127 }
6128 }
6129 /* Add padding if needed. */
6130 if (bitpos != piece_bitpos)
6131 {
6132 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6133 copy ? NULL_RTX : *dest);
6134 dest = &XEXP (*dest, 1);
6135 }
6136 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6137 {
6138 gcc_assert (!copy);
6139 /* A piece with the correct bitpos and bitsize already exists;
6140 just update its location and return. */
6141 *decl_piece_varloc_ptr (*dest) = loc_note;
6142 return;
6143 }
6144 /* Add the piece that changed. */
6145 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6146 dest = &XEXP (*dest, 1);
6147 /* Skip over pieces that overlap it. */
6148 diff = bitpos - piece_bitpos + bitsize;
6149 if (!copy)
6150 src = dest;
6151 while (diff > 0 && *src)
6152 {
6153 rtx piece = *src;
6154 diff -= decl_piece_bitsize (piece);
6155 if (copy)
6156 src = &XEXP (piece, 1);
6157 else
6158 {
6159 *src = XEXP (piece, 1);
6160 free_EXPR_LIST_node (piece);
6161 }
6162 }
6163 /* Add padding if needed. */
6164 if (diff < 0 && *src)
6165 {
6166 if (!copy)
6167 dest = src;
6168 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6169 dest = &XEXP (*dest, 1);
6170 }
6171 if (!copy)
6172 return;
6173 /* Finally copy all nodes following it. */
6174 while (*src)
6175 {
6176 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6177 decl_piece_bitsize (*src), NULL_RTX);
6178 dest = &XEXP (*dest, 1);
6179 src = &XEXP (*src, 1);
6180 }
6181 }
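
/* Illustration only (not part of GCC proper): a sketch of the in-place
   case of adjust_piece_list.  Suppose a variable is currently covered
   by two 32-bit pieces and a new note NOTE_C arrives for bits [32, 64):

     before:  <32-bit piece: NOTE_A> -> <32-bit piece: NOTE_B>
     call:    adjust_piece_list (&second_link, NULL, NULL,
                                 32, 32, 32, NOTE_C);
     after:   <32-bit piece: NOTE_A> -> <32-bit piece: NOTE_C>

   Here SECOND_LINK stands for the link holding the second piece, and
   the arguments are BITPOS, PIECE_BITPOS and BITSIZE, all 32.  Because
   a piece with the right bitpos and bitsize already exists, only its
   location note is replaced; overlapping pieces would instead be
   dropped and padding inserted as needed.  */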
6182
6183 /* Add a variable location node to the linked list for DECL. */
6184
6185 static struct var_loc_node *
6186 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6187 {
6188 unsigned int decl_id;
6189 var_loc_list *temp;
6190 struct var_loc_node *loc = NULL;
6191 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6192
6193 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6194 {
6195 tree realdecl = DECL_DEBUG_EXPR (decl);
6196 if (handled_component_p (realdecl)
6197 || (TREE_CODE (realdecl) == MEM_REF
6198 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6199 {
6200 bool reverse;
6201 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6202 &bitsize, &reverse);
6203 if (!innerdecl
6204 || !DECL_P (innerdecl)
6205 || DECL_IGNORED_P (innerdecl)
6206 || TREE_STATIC (innerdecl)
6207 || bitsize == 0
6208 || bitpos + bitsize > 256)
6209 return NULL;
6210 decl = innerdecl;
6211 }
6212 }
6213
6214 decl_id = DECL_UID (decl);
6215 var_loc_list **slot
6216 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6217 if (*slot == NULL)
6218 {
6219 temp = ggc_cleared_alloc<var_loc_list> ();
6220 temp->decl_id = decl_id;
6221 *slot = temp;
6222 }
6223 else
6224 temp = *slot;
6225
6226 /* For PARM_DECLs try to keep around the original incoming value,
6227 even if that means we'll emit a zero-range .debug_loc entry. */
6228 if (temp->last
6229 && temp->first == temp->last
6230 && TREE_CODE (decl) == PARM_DECL
6231 && NOTE_P (temp->first->loc)
6232 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6233 && DECL_INCOMING_RTL (decl)
6234 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6235 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6236 == GET_CODE (DECL_INCOMING_RTL (decl))
6237 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6238 && (bitsize != -1
6239 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6240 NOTE_VAR_LOCATION_LOC (loc_note))
6241 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6242 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6243 {
6244 loc = ggc_cleared_alloc<var_loc_node> ();
6245 temp->first->next = loc;
6246 temp->last = loc;
6247 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6248 }
6249 else if (temp->last)
6250 {
6251 struct var_loc_node *last = temp->last, *unused = NULL;
6252 rtx *piece_loc = NULL, last_loc_note;
6253 HOST_WIDE_INT piece_bitpos = 0;
6254 if (last->next)
6255 {
6256 last = last->next;
6257 gcc_assert (last->next == NULL);
6258 }
6259 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6260 {
6261 piece_loc = &last->loc;
6262 do
6263 {
6264 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6265 if (piece_bitpos + cur_bitsize > bitpos)
6266 break;
6267 piece_bitpos += cur_bitsize;
6268 piece_loc = &XEXP (*piece_loc, 1);
6269 }
6270 while (*piece_loc);
6271 }
6272 /* TEMP->LAST here points either to the last element of the chained
6273 list or to the one before it; LAST points to the last
6274 element. */
6275 if (label && strcmp (last->label, label) == 0 && last->view == view)
6276 {
6277 /* For SRA-optimized variables, if there weren't any real
6278 insns since the last note, just modify the last node. */
6279 if (piece_loc != NULL)
6280 {
6281 adjust_piece_list (piece_loc, NULL, NULL,
6282 bitpos, piece_bitpos, bitsize, loc_note);
6283 return NULL;
6284 }
6285 /* If the last note doesn't cover any instructions, remove it. */
6286 if (temp->last != last)
6287 {
6288 temp->last->next = NULL;
6289 unused = last;
6290 last = temp->last;
6291 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6292 }
6293 else
6294 {
6295 gcc_assert (temp->first == temp->last
6296 || (temp->first->next == temp->last
6297 && TREE_CODE (decl) == PARM_DECL));
6298 memset (temp->last, '\0', sizeof (*temp->last));
6299 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6300 return temp->last;
6301 }
6302 }
6303 if (bitsize == -1 && NOTE_P (last->loc))
6304 last_loc_note = last->loc;
6305 else if (piece_loc != NULL
6306 && *piece_loc != NULL_RTX
6307 && piece_bitpos == bitpos
6308 && decl_piece_bitsize (*piece_loc) == bitsize)
6309 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6310 else
6311 last_loc_note = NULL_RTX;
6312 /* If the current location is the same as the end of the list,
6313 and either both or neither of the locations is uninitialized,
6314 we have nothing to do. */
6315 if (last_loc_note == NULL_RTX
6316 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6317 NOTE_VAR_LOCATION_LOC (loc_note)))
6318 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6319 != NOTE_VAR_LOCATION_STATUS (loc_note))
6320 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6321 == VAR_INIT_STATUS_UNINITIALIZED)
6322 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6323 == VAR_INIT_STATUS_UNINITIALIZED))))
6324 {
6325 /* Add LOC to the end of the list and update LAST. If the last
6326 element of the list has been removed above, reuse its
6327 memory for the new node; otherwise allocate a new one. */
6328 if (unused)
6329 {
6330 loc = unused;
6331 memset (loc, '\0', sizeof (*loc));
6332 }
6333 else
6334 loc = ggc_cleared_alloc<var_loc_node> ();
6335 if (bitsize == -1 || piece_loc == NULL)
6336 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6337 else
6338 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6339 bitpos, piece_bitpos, bitsize, loc_note);
6340 last->next = loc;
6341 /* Ensure TEMP->LAST will point either to the new last but one
6342 element of the chain, or to the last element in it. */
6343 if (last != temp->last)
6344 temp->last = last;
6345 }
6346 else if (unused)
6347 ggc_free (unused);
6348 }
6349 else
6350 {
6351 loc = ggc_cleared_alloc<var_loc_node> ();
6352 temp->first = loc;
6353 temp->last = loc;
6354 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6355 }
6356 return loc;
6357 }
6358 \f
6359 /* Keep track of the number of spaces used to indent the
6360 output of the debugging routines that print the structure of
6361 the DIE internal representation. */
6362 static int print_indent;
6363
6364 /* Indent the line the number of spaces given by print_indent. */
6365
6366 static inline void
6367 print_spaces (FILE *outfile)
6368 {
6369 fprintf (outfile, "%*s", print_indent, "");
6370 }
6371
6372 /* Print a type signature in hex. */
6373
6374 static inline void
6375 print_signature (FILE *outfile, char *sig)
6376 {
6377 int i;
6378
6379 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6380 fprintf (outfile, "%02x", sig[i] & 0xff);
6381 }
6382
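/* Print the discriminant value DISCR_VALUE to OUTFILE. */
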
6383 static inline void
6384 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6385 {
6386 if (discr_value->pos)
6387 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6388 else
6389 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6390 }
6391
6392 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6393
6394 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6395 RECURSE, output location descriptor operations. */
6396
6397 static void
6398 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6399 {
6400 switch (val->val_class)
6401 {
6402 case dw_val_class_addr:
6403 fprintf (outfile, "address");
6404 break;
6405 case dw_val_class_offset:
6406 fprintf (outfile, "offset");
6407 break;
6408 case dw_val_class_loc:
6409 fprintf (outfile, "location descriptor");
6410 if (val->v.val_loc == NULL)
6411 fprintf (outfile, " -> <null>\n");
6412 else if (recurse)
6413 {
6414 fprintf (outfile, ":\n");
6415 print_indent += 4;
6416 print_loc_descr (val->v.val_loc, outfile);
6417 print_indent -= 4;
6418 }
6419 else
6420 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6421 break;
6422 case dw_val_class_loc_list:
6423 fprintf (outfile, "location list -> label:%s",
6424 val->v.val_loc_list->ll_symbol);
6425 break;
6426 case dw_val_class_view_list:
6427 val = view_list_to_loc_list_val_node (val);
6428 fprintf (outfile, "location list with views -> labels:%s and %s",
6429 val->v.val_loc_list->ll_symbol,
6430 val->v.val_loc_list->vl_symbol);
6431 break;
6432 case dw_val_class_range_list:
6433 fprintf (outfile, "range list");
6434 break;
6435 case dw_val_class_const:
6436 case dw_val_class_const_implicit:
6437 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6438 break;
6439 case dw_val_class_unsigned_const:
6440 case dw_val_class_unsigned_const_implicit:
6441 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6442 break;
6443 case dw_val_class_const_double:
6444 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6445 HOST_WIDE_INT_PRINT_UNSIGNED")",
6446 val->v.val_double.high,
6447 val->v.val_double.low);
6448 break;
6449 case dw_val_class_wide_int:
6450 {
6451 int i = val->v.val_wide->get_len ();
6452 fprintf (outfile, "constant (");
6453 gcc_assert (i > 0);
6454 if (val->v.val_wide->elt (i - 1) == 0)
6455 fprintf (outfile, "0x");
6456 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6457 val->v.val_wide->elt (--i));
6458 while (--i >= 0)
6459 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6460 val->v.val_wide->elt (i));
6461 fprintf (outfile, ")");
6462 break;
6463 }
6464 case dw_val_class_vec:
6465 fprintf (outfile, "floating-point or vector constant");
6466 break;
6467 case dw_val_class_flag:
6468 fprintf (outfile, "%u", val->v.val_flag);
6469 break;
6470 case dw_val_class_die_ref:
6471 if (val->v.val_die_ref.die != NULL)
6472 {
6473 dw_die_ref die = val->v.val_die_ref.die;
6474
6475 if (die->comdat_type_p)
6476 {
6477 fprintf (outfile, "die -> signature: ");
6478 print_signature (outfile,
6479 die->die_id.die_type_node->signature);
6480 }
6481 else if (die->die_id.die_symbol)
6482 {
6483 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6484 if (die->with_offset)
6485 fprintf (outfile, " + %ld", die->die_offset);
6486 }
6487 else
6488 fprintf (outfile, "die -> %ld", die->die_offset);
6489 fprintf (outfile, " (%p)", (void *) die);
6490 }
6491 else
6492 fprintf (outfile, "die -> <null>");
6493 break;
6494 case dw_val_class_vms_delta:
6495 fprintf (outfile, "delta: @slotcount(%s-%s)",
6496 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6497 break;
6498 case dw_val_class_symview:
6499 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6500 break;
6501 case dw_val_class_lbl_id:
6502 case dw_val_class_lineptr:
6503 case dw_val_class_macptr:
6504 case dw_val_class_loclistsptr:
6505 case dw_val_class_high_pc:
6506 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6507 break;
6508 case dw_val_class_str:
6509 if (val->v.val_str->str != NULL)
6510 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6511 else
6512 fprintf (outfile, "<null>");
6513 break;
6514 case dw_val_class_file:
6515 case dw_val_class_file_implicit:
6516 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6517 val->v.val_file->emitted_number);
6518 break;
6519 case dw_val_class_data8:
6520 {
6521 int i;
6522
6523 for (i = 0; i < 8; i++)
6524 fprintf (outfile, "%02x", val->v.val_data8[i]);
6525 break;
6526 }
6527 case dw_val_class_discr_value:
6528 print_discr_value (outfile, &val->v.val_discr_value);
6529 break;
6530 case dw_val_class_discr_list:
6531 for (dw_discr_list_ref node = val->v.val_discr_list;
6532 node != NULL;
6533 node = node->dw_discr_next)
6534 {
6535 if (node->dw_discr_range)
6536 {
6537 print_discr_value (outfile, &node->dw_discr_lower_bound);
6538 fprintf (outfile, " .. ");
6539 print_discr_value (outfile, &node->dw_discr_upper_bound);
6540 }
6541 else
6542 print_discr_value (outfile, &node->dw_discr_lower_bound);
6543
6544 if (node->dw_discr_next != NULL)
6545 fprintf (outfile, " | ");
6546 }
6547 default:
6548 break;
6549 }
6550 }
6551
6552 /* Likewise, for a DIE attribute. */
6553
6554 static void
6555 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6556 {
6557 print_dw_val (&a->dw_attr_val, recurse, outfile);
6558 }
6559
6560
6561 /* Print the list of operands in the LOC location description to OUTFILE. This
6562 routine is a debugging aid only. */
6563
6564 static void
6565 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6566 {
6567 dw_loc_descr_ref l = loc;
6568
6569 if (loc == NULL)
6570 {
6571 print_spaces (outfile);
6572 fprintf (outfile, "<null>\n");
6573 return;
6574 }
6575
6576 for (l = loc; l != NULL; l = l->dw_loc_next)
6577 {
6578 print_spaces (outfile);
6579 fprintf (outfile, "(%p) %s",
6580 (void *) l,
6581 dwarf_stack_op_name (l->dw_loc_opc));
6582 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6583 {
6584 fprintf (outfile, " ");
6585 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6586 }
6587 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6588 {
6589 fprintf (outfile, ", ");
6590 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6591 }
6592 fprintf (outfile, "\n");
6593 }
6594 }
6595
6596 /* Print the information associated with a given DIE, and its children.
6597 This routine is a debugging aid only. */
6598
6599 static void
6600 print_die (dw_die_ref die, FILE *outfile)
6601 {
6602 dw_attr_node *a;
6603 dw_die_ref c;
6604 unsigned ix;
6605
6606 print_spaces (outfile);
6607 fprintf (outfile, "DIE %4ld: %s (%p)\n",
6608 die->die_offset, dwarf_tag_name (die->die_tag),
6609 (void*) die);
6610 print_spaces (outfile);
6611 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6612 fprintf (outfile, " offset: %ld", die->die_offset);
6613 fprintf (outfile, " mark: %d\n", die->die_mark);
6614
6615 if (die->comdat_type_p)
6616 {
6617 print_spaces (outfile);
6618 fprintf (outfile, " signature: ");
6619 print_signature (outfile, die->die_id.die_type_node->signature);
6620 fprintf (outfile, "\n");
6621 }
6622
6623 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6624 {
6625 print_spaces (outfile);
6626 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6627
6628 print_attribute (a, true, outfile);
6629 fprintf (outfile, "\n");
6630 }
6631
6632 if (die->die_child != NULL)
6633 {
6634 print_indent += 4;
6635 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6636 print_indent -= 4;
6637 }
6638 if (print_indent == 0)
6639 fprintf (outfile, "\n");
6640 }
6641
6642 /* Print the list of operations in the LOC location description. */
6643
6644 DEBUG_FUNCTION void
6645 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6646 {
6647 print_loc_descr (loc, stderr);
6648 }
6649
6650 /* Print the information collected for a given DIE. */
6651
6652 DEBUG_FUNCTION void
6653 debug_dwarf_die (dw_die_ref die)
6654 {
6655 print_die (die, stderr);
6656 }
6657
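/* Print the information collected for the DIE referenced by REF. */
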
6658 DEBUG_FUNCTION void
6659 debug (die_struct &ref)
6660 {
6661 print_die (&ref, stderr);
6662 }
6663
6664 DEBUG_FUNCTION void
6665 debug (die_struct *ptr)
6666 {
6667 if (ptr)
6668 debug (*ptr);
6669 else
6670 fprintf (stderr, "<nil>\n");
6671 }
6672
6673
6674 /* Print all DWARF information collected for the compilation unit.
6675 This routine is a debugging aid only. */
6676
6677 DEBUG_FUNCTION void
6678 debug_dwarf (void)
6679 {
6680 print_indent = 0;
6681 print_die (comp_unit_die (), stderr);
6682 }
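
/* Illustration only: these DEBUG_FUNCTION routines are intended to be
   called by hand from a debugger while cc1 is stopped, for example

     (gdb) call debug_dwarf ()
     (gdb) call debug_dwarf_die (die)
     (gdb) call debug_dwarf_loc_descr (loc)

   They print to stderr and are not normally called from GCC itself.  */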
6683
6684 /* Verify the DIE tree structure. */
6685
6686 DEBUG_FUNCTION void
6687 verify_die (dw_die_ref die)
6688 {
6689 gcc_assert (!die->die_mark);
6690 if (die->die_parent == NULL
6691 && die->die_sib == NULL)
6692 return;
6693 /* Verify the die_sib list is cyclic. */
6694 dw_die_ref x = die;
6695 do
6696 {
6697 x->die_mark = 1;
6698 x = x->die_sib;
6699 }
6700 while (x && !x->die_mark);
6701 gcc_assert (x == die);
6702 x = die;
6703 do
6704 {
6705 /* Verify all dies have the same parent. */
6706 gcc_assert (x->die_parent == die->die_parent);
6707 if (x->die_child)
6708 {
6709 /* Verify the child has the proper parent and recurse. */
6710 gcc_assert (x->die_child->die_parent == x);
6711 verify_die (x->die_child);
6712 }
6713 x->die_mark = 0;
6714 x = x->die_sib;
6715 }
6716 while (x && x->die_mark);
6717 }
6718
6719 /* Sanity checks on DIEs. */
6720
6721 static void
6722 check_die (dw_die_ref die)
6723 {
6724 unsigned ix;
6725 dw_attr_node *a;
6726 bool inline_found = false;
6727 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6728 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6729 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6730 {
6731 switch (a->dw_attr)
6732 {
6733 case DW_AT_inline:
6734 if (a->dw_attr_val.v.val_unsigned)
6735 inline_found = true;
6736 break;
6737 case DW_AT_location:
6738 ++n_location;
6739 break;
6740 case DW_AT_low_pc:
6741 ++n_low_pc;
6742 break;
6743 case DW_AT_high_pc:
6744 ++n_high_pc;
6745 break;
6746 case DW_AT_artificial:
6747 ++n_artificial;
6748 break;
6749 case DW_AT_decl_column:
6750 ++n_decl_column;
6751 break;
6752 case DW_AT_decl_line:
6753 ++n_decl_line;
6754 break;
6755 case DW_AT_decl_file:
6756 ++n_decl_file;
6757 break;
6758 default:
6759 break;
6760 }
6761 }
6762 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6763 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6764 {
6765 fprintf (stderr, "Duplicate attributes in DIE:\n");
6766 debug_dwarf_die (die);
6767 gcc_unreachable ();
6768 }
6769 if (inline_found)
6770 {
6771 /* A debugging information entry that is a member of an abstract
6772 instance tree [that has DW_AT_inline] should not contain any
6773 attributes which describe aspects of the subroutine which vary
6774 between distinct inlined expansions or distinct out-of-line
6775 expansions. */
6776 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6777 gcc_assert (a->dw_attr != DW_AT_low_pc
6778 && a->dw_attr != DW_AT_high_pc
6779 && a->dw_attr != DW_AT_location
6780 && a->dw_attr != DW_AT_frame_base
6781 && a->dw_attr != DW_AT_call_all_calls
6782 && a->dw_attr != DW_AT_GNU_all_call_sites);
6783 }
6784 }
6785 \f
6786 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6787 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6788 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6789
6790 /* Calculate the checksum of a location expression. */
6791
6792 static inline void
6793 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6794 {
6795 int tem;
6796 inchash::hash hstate;
6797 hashval_t hash;
6798
6799 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6800 CHECKSUM (tem);
6801 hash_loc_operands (loc, hstate);
6802 hash = hstate.end();
6803 CHECKSUM (hash);
6804 }
6805
6806 /* Calculate the checksum of an attribute. */
6807
6808 static void
6809 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6810 {
6811 dw_loc_descr_ref loc;
6812 rtx r;
6813
6814 CHECKSUM (at->dw_attr);
6815
6816 /* We don't care that this was compiled with a different compiler
6817 snapshot; if the output is the same, that's what matters. */
6818 if (at->dw_attr == DW_AT_producer)
6819 return;
6820
6821 switch (AT_class (at))
6822 {
6823 case dw_val_class_const:
6824 case dw_val_class_const_implicit:
6825 CHECKSUM (at->dw_attr_val.v.val_int);
6826 break;
6827 case dw_val_class_unsigned_const:
6828 case dw_val_class_unsigned_const_implicit:
6829 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6830 break;
6831 case dw_val_class_const_double:
6832 CHECKSUM (at->dw_attr_val.v.val_double);
6833 break;
6834 case dw_val_class_wide_int:
6835 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6836 get_full_len (*at->dw_attr_val.v.val_wide)
6837 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6838 break;
6839 case dw_val_class_vec:
6840 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6841 (at->dw_attr_val.v.val_vec.length
6842 * at->dw_attr_val.v.val_vec.elt_size));
6843 break;
6844 case dw_val_class_flag:
6845 CHECKSUM (at->dw_attr_val.v.val_flag);
6846 break;
6847 case dw_val_class_str:
6848 CHECKSUM_STRING (AT_string (at));
6849 break;
6850
6851 case dw_val_class_addr:
6852 r = AT_addr (at);
6853 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6854 CHECKSUM_STRING (XSTR (r, 0));
6855 break;
6856
6857 case dw_val_class_offset:
6858 CHECKSUM (at->dw_attr_val.v.val_offset);
6859 break;
6860
6861 case dw_val_class_loc:
6862 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6863 loc_checksum (loc, ctx);
6864 break;
6865
6866 case dw_val_class_die_ref:
6867 die_checksum (AT_ref (at), ctx, mark);
6868 break;
6869
6870 case dw_val_class_fde_ref:
6871 case dw_val_class_vms_delta:
6872 case dw_val_class_symview:
6873 case dw_val_class_lbl_id:
6874 case dw_val_class_lineptr:
6875 case dw_val_class_macptr:
6876 case dw_val_class_loclistsptr:
6877 case dw_val_class_high_pc:
6878 break;
6879
6880 case dw_val_class_file:
6881 case dw_val_class_file_implicit:
6882 CHECKSUM_STRING (AT_file (at)->filename);
6883 break;
6884
6885 case dw_val_class_data8:
6886 CHECKSUM (at->dw_attr_val.v.val_data8);
6887 break;
6888
6889 default:
6890 break;
6891 }
6892 }
6893
6894 /* Calculate the checksum of a DIE. */
6895
6896 static void
6897 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6898 {
6899 dw_die_ref c;
6900 dw_attr_node *a;
6901 unsigned ix;
6902
6903 /* To avoid infinite recursion. */
6904 if (die->die_mark)
6905 {
6906 CHECKSUM (die->die_mark);
6907 return;
6908 }
6909 die->die_mark = ++(*mark);
6910
6911 CHECKSUM (die->die_tag);
6912
6913 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6914 attr_checksum (a, ctx, mark);
6915
6916 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6917 }
6918
6919 #undef CHECKSUM
6920 #undef CHECKSUM_BLOCK
6921 #undef CHECKSUM_STRING
6922
6923 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6924 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6925 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6926 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6927 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6928 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6929 #define CHECKSUM_ATTR(FOO) \
6930 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6931
6932 /* Calculate the checksum of a number in signed LEB128 format. */
6933
6934 static void
6935 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6936 {
6937 unsigned char byte;
6938 bool more;
6939
6940 while (1)
6941 {
6942 byte = (value & 0x7f);
6943 value >>= 7;
6944 more = !((value == 0 && (byte & 0x40) == 0)
6945 || (value == -1 && (byte & 0x40) != 0));
6946 if (more)
6947 byte |= 0x80;
6948 CHECKSUM (byte);
6949 if (!more)
6950 break;
6951 }
6952 }
6953
6954 /* Calculate the checksum of a number in unsigned LEB128 format. */
6955
6956 static void
6957 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
6958 {
6959 while (1)
6960 {
6961 unsigned char byte = (value & 0x7f);
6962 value >>= 7;
6963 if (value != 0)
6964 /* More bytes to follow. */
6965 byte |= 0x80;
6966 CHECKSUM (byte);
6967 if (value == 0)
6968 break;
6969 }
6970 }
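
/* Editorial aside, not part of dwarf2out.c: a minimal stand-alone sketch of
   the byte sequences the two encodings above produce, using the worked
   examples from the DWARF standard (ULEB128 of 624485 is e5 8e 26, SLEB128
   of -624485 is 9b f1 59).  Like checksum_sleb128, the signed encoder
   assumes an arithmetic right shift of negative values.  Kept under
   "#if 0" so it is never compiled.  */
#if 0
#include <stdio.h>
#include <stdbool.h>

static void
print_uleb128 (unsigned long value)
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value != 0)
        byte |= 0x80;           /* More bytes follow.  */
      printf ("%02x ", byte);
    }
  while (value != 0);
  putchar ('\n');
}

static void
print_sleb128 (long value)
{
  bool more = true;
  while (more)
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      more = !((value == 0 && (byte & 0x40) == 0)
               || (value == -1 && (byte & 0x40) != 0));
      if (more)
        byte |= 0x80;           /* More bytes follow.  */
      printf ("%02x ", byte);
    }
  putchar ('\n');
}

int
main (void)
{
  print_uleb128 (624485);       /* prints: e5 8e 26 */
  print_sleb128 (-624485);      /* prints: 9b f1 59 */
  return 0;
}
#endif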
6971
6972 /* Checksum the context of the DIE. This adds the names of any
6973 surrounding namespaces or structures to the checksum. */
6974
6975 static void
6976 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
6977 {
6978 const char *name;
6979 dw_die_ref spec;
6980 int tag = die->die_tag;
6981
6982 if (tag != DW_TAG_namespace
6983 && tag != DW_TAG_structure_type
6984 && tag != DW_TAG_class_type)
6985 return;
6986
6987 name = get_AT_string (die, DW_AT_name);
6988
6989 spec = get_AT_ref (die, DW_AT_specification);
6990 if (spec != NULL)
6991 die = spec;
6992
6993 if (die->die_parent != NULL)
6994 checksum_die_context (die->die_parent, ctx);
6995
6996 CHECKSUM_ULEB128 ('C');
6997 CHECKSUM_ULEB128 (tag);
6998 if (name != NULL)
6999 CHECKSUM_STRING (name);
7000 }
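
/* Editorial example: for the nested type T in

     namespace N { struct S { struct T { ... }; }; }

   calling checksum_die_context on T's parent feeds
   'C', DW_TAG_namespace, "N" and then 'C', DW_TAG_structure_type, "S"
   into the hash, so the final signature distinguishes N::S::T from an
   identically named type declared in a different scope.  */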
7001
7002 /* Calculate the checksum of a location expression. */
7003
7004 static inline void
7005 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7006 {
7007 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7008 were emitted as a DW_FORM_sdata constant instead of a location expression,
so the result is the same whichever of these two equivalent forms was used. */
7009 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7010 {
7011 CHECKSUM_ULEB128 (DW_FORM_sdata);
7012 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7013 return;
7014 }
7015
7016 /* Otherwise, just checksum the raw location expression. */
7017 while (loc != NULL)
7018 {
7019 inchash::hash hstate;
7020 hashval_t hash;
7021
7022 CHECKSUM_ULEB128 (loc->dtprel);
7023 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7024 hash_loc_operands (loc, hstate);
7025 hash = hstate.end ();
7026 CHECKSUM (hash);
7027 loc = loc->dw_loc_next;
7028 }
7029 }
7030
7031 /* Calculate the checksum of an attribute. */
7032
7033 static void
7034 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7035 struct md5_ctx *ctx, int *mark)
7036 {
7037 dw_loc_descr_ref loc;
7038 rtx r;
7039
7040 if (AT_class (at) == dw_val_class_die_ref)
7041 {
7042 dw_die_ref target_die = AT_ref (at);
7043
7044 /* For pointer and reference types, we checksum only the (qualified)
7045 name of the target type (if there is a name). For friend entries,
7046 we checksum only the (qualified) name of the target type or function.
7047 This allows the checksum to remain the same whether the target type
7048 is complete or not. */
7049 if ((at->dw_attr == DW_AT_type
7050 && (tag == DW_TAG_pointer_type
7051 || tag == DW_TAG_reference_type
7052 || tag == DW_TAG_rvalue_reference_type
7053 || tag == DW_TAG_ptr_to_member_type))
7054 || (at->dw_attr == DW_AT_friend
7055 && tag == DW_TAG_friend))
7056 {
7057 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7058
7059 if (name_attr != NULL)
7060 {
7061 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7062
7063 if (decl == NULL)
7064 decl = target_die;
7065 CHECKSUM_ULEB128 ('N');
7066 CHECKSUM_ULEB128 (at->dw_attr);
7067 if (decl->die_parent != NULL)
7068 checksum_die_context (decl->die_parent, ctx);
7069 CHECKSUM_ULEB128 ('E');
7070 CHECKSUM_STRING (AT_string (name_attr));
7071 return;
7072 }
7073 }
7074
7075 /* For all other references to another DIE, we check to see if the
7076 target DIE has already been visited. If it has, we emit a
7077 backward reference; if not, we descend recursively. */
7078 if (target_die->die_mark > 0)
7079 {
7080 CHECKSUM_ULEB128 ('R');
7081 CHECKSUM_ULEB128 (at->dw_attr);
7082 CHECKSUM_ULEB128 (target_die->die_mark);
7083 }
7084 else
7085 {
7086 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7087
7088 if (decl == NULL)
7089 decl = target_die;
7090 target_die->die_mark = ++(*mark);
7091 CHECKSUM_ULEB128 ('T');
7092 CHECKSUM_ULEB128 (at->dw_attr);
7093 if (decl->die_parent != NULL)
7094 checksum_die_context (decl->die_parent, ctx);
7095 die_checksum_ordered (target_die, ctx, mark);
7096 }
7097 return;
7098 }
7099
7100 CHECKSUM_ULEB128 ('A');
7101 CHECKSUM_ULEB128 (at->dw_attr);
7102
7103 switch (AT_class (at))
7104 {
7105 case dw_val_class_const:
7106 case dw_val_class_const_implicit:
7107 CHECKSUM_ULEB128 (DW_FORM_sdata);
7108 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7109 break;
7110
7111 case dw_val_class_unsigned_const:
7112 case dw_val_class_unsigned_const_implicit:
7113 CHECKSUM_ULEB128 (DW_FORM_sdata);
7114 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7115 break;
7116
7117 case dw_val_class_const_double:
7118 CHECKSUM_ULEB128 (DW_FORM_block);
7119 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7120 CHECKSUM (at->dw_attr_val.v.val_double);
7121 break;
7122
7123 case dw_val_class_wide_int:
7124 CHECKSUM_ULEB128 (DW_FORM_block);
7125 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7126 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
7127 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7128 get_full_len (*at->dw_attr_val.v.val_wide)
7129 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7130 break;
7131
7132 case dw_val_class_vec:
7133 CHECKSUM_ULEB128 (DW_FORM_block);
7134 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7135 * at->dw_attr_val.v.val_vec.elt_size);
7136 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7137 (at->dw_attr_val.v.val_vec.length
7138 * at->dw_attr_val.v.val_vec.elt_size));
7139 break;
7140
7141 case dw_val_class_flag:
7142 CHECKSUM_ULEB128 (DW_FORM_flag);
7143 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7144 break;
7145
7146 case dw_val_class_str:
7147 CHECKSUM_ULEB128 (DW_FORM_string);
7148 CHECKSUM_STRING (AT_string (at));
7149 break;
7150
7151 case dw_val_class_addr:
7152 r = AT_addr (at);
7153 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7154 CHECKSUM_ULEB128 (DW_FORM_string);
7155 CHECKSUM_STRING (XSTR (r, 0));
7156 break;
7157
7158 case dw_val_class_offset:
7159 CHECKSUM_ULEB128 (DW_FORM_sdata);
7160 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7161 break;
7162
7163 case dw_val_class_loc:
7164 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7165 loc_checksum_ordered (loc, ctx);
7166 break;
7167
7168 case dw_val_class_fde_ref:
7169 case dw_val_class_symview:
7170 case dw_val_class_lbl_id:
7171 case dw_val_class_lineptr:
7172 case dw_val_class_macptr:
7173 case dw_val_class_loclistsptr:
7174 case dw_val_class_high_pc:
7175 break;
7176
7177 case dw_val_class_file:
7178 case dw_val_class_file_implicit:
7179 CHECKSUM_ULEB128 (DW_FORM_string);
7180 CHECKSUM_STRING (AT_file (at)->filename);
7181 break;
7182
7183 case dw_val_class_data8:
7184 CHECKSUM (at->dw_attr_val.v.val_data8);
7185 break;
7186
7187 default:
7188 break;
7189 }
7190 }
7191
7192 struct checksum_attributes
7193 {
7194 dw_attr_node *at_name;
7195 dw_attr_node *at_type;
7196 dw_attr_node *at_friend;
7197 dw_attr_node *at_accessibility;
7198 dw_attr_node *at_address_class;
7199 dw_attr_node *at_alignment;
7200 dw_attr_node *at_allocated;
7201 dw_attr_node *at_artificial;
7202 dw_attr_node *at_associated;
7203 dw_attr_node *at_binary_scale;
7204 dw_attr_node *at_bit_offset;
7205 dw_attr_node *at_bit_size;
7206 dw_attr_node *at_bit_stride;
7207 dw_attr_node *at_byte_size;
7208 dw_attr_node *at_byte_stride;
7209 dw_attr_node *at_const_value;
7210 dw_attr_node *at_containing_type;
7211 dw_attr_node *at_count;
7212 dw_attr_node *at_data_location;
7213 dw_attr_node *at_data_member_location;
7214 dw_attr_node *at_decimal_scale;
7215 dw_attr_node *at_decimal_sign;
7216 dw_attr_node *at_default_value;
7217 dw_attr_node *at_digit_count;
7218 dw_attr_node *at_discr;
7219 dw_attr_node *at_discr_list;
7220 dw_attr_node *at_discr_value;
7221 dw_attr_node *at_encoding;
7222 dw_attr_node *at_endianity;
7223 dw_attr_node *at_explicit;
7224 dw_attr_node *at_is_optional;
7225 dw_attr_node *at_location;
7226 dw_attr_node *at_lower_bound;
7227 dw_attr_node *at_mutable;
7228 dw_attr_node *at_ordering;
7229 dw_attr_node *at_picture_string;
7230 dw_attr_node *at_prototyped;
7231 dw_attr_node *at_small;
7232 dw_attr_node *at_segment;
7233 dw_attr_node *at_string_length;
7234 dw_attr_node *at_string_length_bit_size;
7235 dw_attr_node *at_string_length_byte_size;
7236 dw_attr_node *at_threads_scaled;
7237 dw_attr_node *at_upper_bound;
7238 dw_attr_node *at_use_location;
7239 dw_attr_node *at_use_UTF8;
7240 dw_attr_node *at_variable_parameter;
7241 dw_attr_node *at_virtuality;
7242 dw_attr_node *at_visibility;
7243 dw_attr_node *at_vtable_elem_location;
7244 };
7245
7246 /* Collect the attributes that we will want to use for the checksum. */
7247
7248 static void
7249 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7250 {
7251 dw_attr_node *a;
7252 unsigned ix;
7253
7254 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7255 {
7256 switch (a->dw_attr)
7257 {
7258 case DW_AT_name:
7259 attrs->at_name = a;
7260 break;
7261 case DW_AT_type:
7262 attrs->at_type = a;
7263 break;
7264 case DW_AT_friend:
7265 attrs->at_friend = a;
7266 break;
7267 case DW_AT_accessibility:
7268 attrs->at_accessibility = a;
7269 break;
7270 case DW_AT_address_class:
7271 attrs->at_address_class = a;
7272 break;
7273 case DW_AT_alignment:
7274 attrs->at_alignment = a;
7275 break;
7276 case DW_AT_allocated:
7277 attrs->at_allocated = a;
7278 break;
7279 case DW_AT_artificial:
7280 attrs->at_artificial = a;
7281 break;
7282 case DW_AT_associated:
7283 attrs->at_associated = a;
7284 break;
7285 case DW_AT_binary_scale:
7286 attrs->at_binary_scale = a;
7287 break;
7288 case DW_AT_bit_offset:
7289 attrs->at_bit_offset = a;
7290 break;
7291 case DW_AT_bit_size:
7292 attrs->at_bit_size = a;
7293 break;
7294 case DW_AT_bit_stride:
7295 attrs->at_bit_stride = a;
7296 break;
7297 case DW_AT_byte_size:
7298 attrs->at_byte_size = a;
7299 break;
7300 case DW_AT_byte_stride:
7301 attrs->at_byte_stride = a;
7302 break;
7303 case DW_AT_const_value:
7304 attrs->at_const_value = a;
7305 break;
7306 case DW_AT_containing_type:
7307 attrs->at_containing_type = a;
7308 break;
7309 case DW_AT_count:
7310 attrs->at_count = a;
7311 break;
7312 case DW_AT_data_location:
7313 attrs->at_data_location = a;
7314 break;
7315 case DW_AT_data_member_location:
7316 attrs->at_data_member_location = a;
7317 break;
7318 case DW_AT_decimal_scale:
7319 attrs->at_decimal_scale = a;
7320 break;
7321 case DW_AT_decimal_sign:
7322 attrs->at_decimal_sign = a;
7323 break;
7324 case DW_AT_default_value:
7325 attrs->at_default_value = a;
7326 break;
7327 case DW_AT_digit_count:
7328 attrs->at_digit_count = a;
7329 break;
7330 case DW_AT_discr:
7331 attrs->at_discr = a;
7332 break;
7333 case DW_AT_discr_list:
7334 attrs->at_discr_list = a;
7335 break;
7336 case DW_AT_discr_value:
7337 attrs->at_discr_value = a;
7338 break;
7339 case DW_AT_encoding:
7340 attrs->at_encoding = a;
7341 break;
7342 case DW_AT_endianity:
7343 attrs->at_endianity = a;
7344 break;
7345 case DW_AT_explicit:
7346 attrs->at_explicit = a;
7347 break;
7348 case DW_AT_is_optional:
7349 attrs->at_is_optional = a;
7350 break;
7351 case DW_AT_location:
7352 attrs->at_location = a;
7353 break;
7354 case DW_AT_lower_bound:
7355 attrs->at_lower_bound = a;
7356 break;
7357 case DW_AT_mutable:
7358 attrs->at_mutable = a;
7359 break;
7360 case DW_AT_ordering:
7361 attrs->at_ordering = a;
7362 break;
7363 case DW_AT_picture_string:
7364 attrs->at_picture_string = a;
7365 break;
7366 case DW_AT_prototyped:
7367 attrs->at_prototyped = a;
7368 break;
7369 case DW_AT_small:
7370 attrs->at_small = a;
7371 break;
7372 case DW_AT_segment:
7373 attrs->at_segment = a;
7374 break;
7375 case DW_AT_string_length:
7376 attrs->at_string_length = a;
7377 break;
7378 case DW_AT_string_length_bit_size:
7379 attrs->at_string_length_bit_size = a;
7380 break;
7381 case DW_AT_string_length_byte_size:
7382 attrs->at_string_length_byte_size = a;
7383 break;
7384 case DW_AT_threads_scaled:
7385 attrs->at_threads_scaled = a;
7386 break;
7387 case DW_AT_upper_bound:
7388 attrs->at_upper_bound = a;
7389 break;
7390 case DW_AT_use_location:
7391 attrs->at_use_location = a;
7392 break;
7393 case DW_AT_use_UTF8:
7394 attrs->at_use_UTF8 = a;
7395 break;
7396 case DW_AT_variable_parameter:
7397 attrs->at_variable_parameter = a;
7398 break;
7399 case DW_AT_virtuality:
7400 attrs->at_virtuality = a;
7401 break;
7402 case DW_AT_visibility:
7403 attrs->at_visibility = a;
7404 break;
7405 case DW_AT_vtable_elem_location:
7406 attrs->at_vtable_elem_location = a;
7407 break;
7408 default:
7409 break;
7410 }
7411 }
7412 }
7413
7414 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7415
7416 static void
7417 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7418 {
7419 dw_die_ref c;
7420 dw_die_ref decl;
7421 struct checksum_attributes attrs;
7422
7423 CHECKSUM_ULEB128 ('D');
7424 CHECKSUM_ULEB128 (die->die_tag);
7425
7426 memset (&attrs, 0, sizeof (attrs));
7427
7428 decl = get_AT_ref (die, DW_AT_specification);
7429 if (decl != NULL)
7430 collect_checksum_attributes (&attrs, decl);
7431 collect_checksum_attributes (&attrs, die);
7432
7433 CHECKSUM_ATTR (attrs.at_name);
7434 CHECKSUM_ATTR (attrs.at_accessibility);
7435 CHECKSUM_ATTR (attrs.at_address_class);
7436 CHECKSUM_ATTR (attrs.at_allocated);
7437 CHECKSUM_ATTR (attrs.at_artificial);
7438 CHECKSUM_ATTR (attrs.at_associated);
7439 CHECKSUM_ATTR (attrs.at_binary_scale);
7440 CHECKSUM_ATTR (attrs.at_bit_offset);
7441 CHECKSUM_ATTR (attrs.at_bit_size);
7442 CHECKSUM_ATTR (attrs.at_bit_stride);
7443 CHECKSUM_ATTR (attrs.at_byte_size);
7444 CHECKSUM_ATTR (attrs.at_byte_stride);
7445 CHECKSUM_ATTR (attrs.at_const_value);
7446 CHECKSUM_ATTR (attrs.at_containing_type);
7447 CHECKSUM_ATTR (attrs.at_count);
7448 CHECKSUM_ATTR (attrs.at_data_location);
7449 CHECKSUM_ATTR (attrs.at_data_member_location);
7450 CHECKSUM_ATTR (attrs.at_decimal_scale);
7451 CHECKSUM_ATTR (attrs.at_decimal_sign);
7452 CHECKSUM_ATTR (attrs.at_default_value);
7453 CHECKSUM_ATTR (attrs.at_digit_count);
7454 CHECKSUM_ATTR (attrs.at_discr);
7455 CHECKSUM_ATTR (attrs.at_discr_list);
7456 CHECKSUM_ATTR (attrs.at_discr_value);
7457 CHECKSUM_ATTR (attrs.at_encoding);
7458 CHECKSUM_ATTR (attrs.at_endianity);
7459 CHECKSUM_ATTR (attrs.at_explicit);
7460 CHECKSUM_ATTR (attrs.at_is_optional);
7461 CHECKSUM_ATTR (attrs.at_location);
7462 CHECKSUM_ATTR (attrs.at_lower_bound);
7463 CHECKSUM_ATTR (attrs.at_mutable);
7464 CHECKSUM_ATTR (attrs.at_ordering);
7465 CHECKSUM_ATTR (attrs.at_picture_string);
7466 CHECKSUM_ATTR (attrs.at_prototyped);
7467 CHECKSUM_ATTR (attrs.at_small);
7468 CHECKSUM_ATTR (attrs.at_segment);
7469 CHECKSUM_ATTR (attrs.at_string_length);
7470 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7471 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7472 CHECKSUM_ATTR (attrs.at_threads_scaled);
7473 CHECKSUM_ATTR (attrs.at_upper_bound);
7474 CHECKSUM_ATTR (attrs.at_use_location);
7475 CHECKSUM_ATTR (attrs.at_use_UTF8);
7476 CHECKSUM_ATTR (attrs.at_variable_parameter);
7477 CHECKSUM_ATTR (attrs.at_virtuality);
7478 CHECKSUM_ATTR (attrs.at_visibility);
7479 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7480 CHECKSUM_ATTR (attrs.at_type);
7481 CHECKSUM_ATTR (attrs.at_friend);
7482 CHECKSUM_ATTR (attrs.at_alignment);
7483
7484 /* Checksum the child DIEs. */
7485 c = die->die_child;
7486 if (c) do {
7487 dw_attr_node *name_attr;
7488
7489 c = c->die_sib;
7490 name_attr = get_AT (c, DW_AT_name);
7491 if (is_template_instantiation (c))
7492 {
7493 /* Ignore instantiations of member type and function templates. */
7494 }
7495 else if (name_attr != NULL
7496 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7497 {
7498 /* Use a shallow checksum for named nested types and member
7499 functions. */
7500 CHECKSUM_ULEB128 ('S');
7501 CHECKSUM_ULEB128 (c->die_tag);
7502 CHECKSUM_STRING (AT_string (name_attr));
7503 }
7504 else
7505 {
7506 /* Use a deep checksum for other children. */
7507 /* Mark this DIE so it gets processed when unmarking. */
7508 if (c->die_mark == 0)
7509 c->die_mark = -1;
7510 die_checksum_ordered (c, ctx, mark);
7511 }
7512 } while (c != die->die_child);
7513
7514 CHECKSUM_ULEB128 (0);
7515 }
7516
7517 /* Add a type name and tag to a hash. */
7518 static void
7519 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7520 {
7521 CHECKSUM_ULEB128 (tag);
7522 CHECKSUM_STRING (name);
7523 }
7524
7525 #undef CHECKSUM
7526 #undef CHECKSUM_STRING
7527 #undef CHECKSUM_ATTR
7528 #undef CHECKSUM_SLEB128
7529 #undef CHECKSUM_ULEB128
7530
7531 /* Generate the type signature for DIE. This is computed by generating an
7532 MD5 checksum over the DIE's tag, its relevant attributes, and its
7533 children. Attributes that are references to other DIEs are processed
7534 by recursion, using the MARK field to prevent infinite recursion.
7535 If the DIE is nested inside a namespace or another type, we also
7536 need to include that context in the signature. The lower 64 bits
7537 of the resulting MD5 checksum comprise the signature. */
7538
7539 static void
7540 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7541 {
7542 int mark;
7543 const char *name;
7544 unsigned char checksum[16];
7545 struct md5_ctx ctx;
7546 dw_die_ref decl;
7547 dw_die_ref parent;
7548
7549 name = get_AT_string (die, DW_AT_name);
7550 decl = get_AT_ref (die, DW_AT_specification);
7551 parent = get_die_parent (die);
7552
7553 /* First, compute a signature for just the type name (and its surrounding
7554 context, if any). This is stored in the type unit DIE for link-time
7555 ODR (one-definition rule) checking. */
7556
7557 if (is_cxx () && name != NULL)
7558 {
7559 md5_init_ctx (&ctx);
7560
7561 /* Checksum the names of surrounding namespaces and structures. */
7562 if (parent != NULL)
7563 checksum_die_context (parent, &ctx);
7564
7565 /* Checksum the current DIE. */
7566 die_odr_checksum (die->die_tag, name, &ctx);
7567 md5_finish_ctx (&ctx, checksum);
7568
7569 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7570 }
7571
7572 /* Next, compute the complete type signature. */
7573
7574 md5_init_ctx (&ctx);
7575 mark = 1;
7576 die->die_mark = mark;
7577
7578 /* Checksum the names of surrounding namespaces and structures. */
7579 if (parent != NULL)
7580 checksum_die_context (parent, &ctx);
7581
7582 /* Checksum the DIE and its children. */
7583 die_checksum_ordered (die, &ctx, &mark);
7584 unmark_all_dies (die);
7585 md5_finish_ctx (&ctx, checksum);
7586
7587 /* Store the signature in the type node and link the type DIE and the
7588 type node together. */
7589 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7590 DWARF_TYPE_SIGNATURE_SIZE);
7591 die->comdat_type_p = true;
7592 die->die_id.die_type_node = type_node;
7593 type_node->type_die = die;
7594
7595 /* If the DIE is a specification, link its declaration to the type node
7596 as well. */
7597 if (decl != NULL)
7598 {
7599 decl->comdat_type_p = true;
7600 decl->die_id.die_type_node = type_node;
7601 }
7602 }
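
/* Editorial aside, not part of dwarf2out.c: the signature above is simply
   the last DWARF_TYPE_SIGNATURE_SIZE (eight) bytes of an MD5 digest, i.e.
   what the comment calls the lower 64 bits.  A stand-alone sketch of that
   extraction, using the same libiberty md5.h interface used throughout
   this file; kept under "#if 0" so it is never compiled.  */
#if 0
#include <string.h>
#include "md5.h"

/* Hash an arbitrary byte stream and copy its low 64 bits into SIG, the
   same way generate_type_signature extracts a comdat type signature.  */
static void
signature_of_bytes (const void *bytes, size_t len, unsigned char sig[8])
{
  unsigned char checksum[16];
  struct md5_ctx ctx;

  md5_init_ctx (&ctx);
  md5_process_bytes (bytes, len, &ctx);
  md5_finish_ctx (&ctx, checksum);
  memcpy (sig, checksum + 16 - 8, 8);
}
#endif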
7603
7604 /* Do the location expressions look the same? */
7605 static inline int
7606 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7607 {
7608 return loc1->dw_loc_opc == loc2->dw_loc_opc
7609 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7610 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7611 }
7612
7613 /* Do the values look the same? */
7614 static int
7615 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7616 {
7617 dw_loc_descr_ref loc1, loc2;
7618 rtx r1, r2;
7619
7620 if (v1->val_class != v2->val_class)
7621 return 0;
7622
7623 switch (v1->val_class)
7624 {
7625 case dw_val_class_const:
7626 case dw_val_class_const_implicit:
7627 return v1->v.val_int == v2->v.val_int;
7628 case dw_val_class_unsigned_const:
7629 case dw_val_class_unsigned_const_implicit:
7630 return v1->v.val_unsigned == v2->v.val_unsigned;
7631 case dw_val_class_const_double:
7632 return v1->v.val_double.high == v2->v.val_double.high
7633 && v1->v.val_double.low == v2->v.val_double.low;
7634 case dw_val_class_wide_int:
7635 return *v1->v.val_wide == *v2->v.val_wide;
7636 case dw_val_class_vec:
7637 if (v1->v.val_vec.length != v2->v.val_vec.length
7638 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7639 return 0;
7640 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7641 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7642 return 0;
7643 return 1;
7644 case dw_val_class_flag:
7645 return v1->v.val_flag == v2->v.val_flag;
7646 case dw_val_class_str:
7647 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7648
7649 case dw_val_class_addr:
7650 r1 = v1->v.val_addr;
7651 r2 = v2->v.val_addr;
7652 if (GET_CODE (r1) != GET_CODE (r2))
7653 return 0;
7654 return rtx_equal_p (r1, r2);
7655
7656 case dw_val_class_offset:
7657 return v1->v.val_offset == v2->v.val_offset;
7658
7659 case dw_val_class_loc:
7660 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7661 loc1 && loc2;
7662 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7663 if (!same_loc_p (loc1, loc2, mark))
7664 return 0;
7665 return !loc1 && !loc2;
7666
7667 case dw_val_class_die_ref:
7668 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7669
7670 case dw_val_class_symview:
7671 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7672
7673 case dw_val_class_fde_ref:
7674 case dw_val_class_vms_delta:
7675 case dw_val_class_lbl_id:
7676 case dw_val_class_lineptr:
7677 case dw_val_class_macptr:
7678 case dw_val_class_loclistsptr:
7679 case dw_val_class_high_pc:
7680 return 1;
7681
7682 case dw_val_class_file:
7683 case dw_val_class_file_implicit:
7684 return v1->v.val_file == v2->v.val_file;
7685
7686 case dw_val_class_data8:
7687 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7688
7689 default:
7690 return 1;
7691 }
7692 }
7693
7694 /* Do the attributes look the same? */
7695
7696 static int
7697 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7698 {
7699 if (at1->dw_attr != at2->dw_attr)
7700 return 0;
7701
7702 /* We don't care that this was compiled with a different compiler
7703 snapshot; if the output is the same, that's what matters. */
7704 if (at1->dw_attr == DW_AT_producer)
7705 return 1;
7706
7707 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7708 }
7709
7710 /* Do the dies look the same? */
7711
7712 static int
7713 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7714 {
7715 dw_die_ref c1, c2;
7716 dw_attr_node *a1;
7717 unsigned ix;
7718
7719 /* To avoid infinite recursion. */
7720 if (die1->die_mark)
7721 return die1->die_mark == die2->die_mark;
7722 die1->die_mark = die2->die_mark = ++(*mark);
7723
7724 if (die1->die_tag != die2->die_tag)
7725 return 0;
7726
7727 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7728 return 0;
7729
7730 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7731 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7732 return 0;
7733
7734 c1 = die1->die_child;
7735 c2 = die2->die_child;
7736 if (! c1)
7737 {
7738 if (c2)
7739 return 0;
7740 }
7741 else
7742 for (;;)
7743 {
7744 if (!same_die_p (c1, c2, mark))
7745 return 0;
7746 c1 = c1->die_sib;
7747 c2 = c2->die_sib;
7748 if (c1 == die1->die_child)
7749 {
7750 if (c2 == die2->die_child)
7751 break;
7752 else
7753 return 0;
7754 }
7755 }
7756
7757 return 1;
7758 }
7759
7760 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7761 children, and set die_symbol. */
7762
7763 static void
7764 compute_comp_unit_symbol (dw_die_ref unit_die)
7765 {
7766 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7767 const char *base = die_name ? lbasename (die_name) : "anonymous";
7768 char *name = XALLOCAVEC (char, strlen (base) + 64);
7769 char *p;
7770 int i, mark;
7771 unsigned char checksum[16];
7772 struct md5_ctx ctx;
7773
7774 /* Compute the checksum of the DIE, then append part of it as hex digits to
7775 the name (normally the source filename) of the unit. */
7776
7777 md5_init_ctx (&ctx);
7778 mark = 0;
7779 die_checksum (unit_die, &ctx, &mark);
7780 unmark_all_dies (unit_die);
7781 md5_finish_ctx (&ctx, checksum);
7782
7783 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7784 not start with a letter but with anything valid for filenames and
7785 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7786 character is not a letter. */
7787 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7788 clean_symbol_name (name);
7789
7790 p = name + strlen (name);
7791 for (i = 0; i < 4; i++)
7792 {
7793 sprintf (p, "%.2x", checksum[i]);
7794 p += 2;
7795 }
7796
7797 unit_die->die_id.die_symbol = xstrdup (name);
7798 }
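
/* Editorial example: for a unit whose DW_AT_name is "t.c" the resulting
   die_symbol has the shape of the cleaned-up filename followed by the
   first four checksum bytes as hex digits, e.g. something like
   "t_c_5f0c842e" (the exact punctuation mapping is whatever
   clean_symbol_name performs, and a 'g' is prepended when the name does
   not start with a letter).  */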
7799
7800 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7801
7802 static int
7803 is_type_die (dw_die_ref die)
7804 {
7805 switch (die->die_tag)
7806 {
7807 case DW_TAG_array_type:
7808 case DW_TAG_class_type:
7809 case DW_TAG_interface_type:
7810 case DW_TAG_enumeration_type:
7811 case DW_TAG_pointer_type:
7812 case DW_TAG_reference_type:
7813 case DW_TAG_rvalue_reference_type:
7814 case DW_TAG_string_type:
7815 case DW_TAG_structure_type:
7816 case DW_TAG_subroutine_type:
7817 case DW_TAG_union_type:
7818 case DW_TAG_ptr_to_member_type:
7819 case DW_TAG_set_type:
7820 case DW_TAG_subrange_type:
7821 case DW_TAG_base_type:
7822 case DW_TAG_const_type:
7823 case DW_TAG_file_type:
7824 case DW_TAG_packed_type:
7825 case DW_TAG_volatile_type:
7826 case DW_TAG_typedef:
7827 return 1;
7828 default:
7829 return 0;
7830 }
7831 }
7832
7833 /* Returns 1 iff C is the sort of DIE that should go into a COMDAT CU.
7834 Basically, we want to choose the bits that are likely to be shared between
7835 compilations (types) and leave out the bits that are specific to individual
7836 compilations (functions). */
7837
7838 static int
7839 is_comdat_die (dw_die_ref c)
7840 {
7841 /* I think we want to leave base types and __vtbl_ptr_type in the main CU, as
7842 we do for stabs. The advantage is a greater likelihood of sharing between
7843 objects that don't include headers in the same order (and therefore would
7844 put the base types in a different comdat). jason 8/28/00 */
7845
7846 if (c->die_tag == DW_TAG_base_type)
7847 return 0;
7848
7849 if (c->die_tag == DW_TAG_pointer_type
7850 || c->die_tag == DW_TAG_reference_type
7851 || c->die_tag == DW_TAG_rvalue_reference_type
7852 || c->die_tag == DW_TAG_const_type
7853 || c->die_tag == DW_TAG_volatile_type)
7854 {
7855 dw_die_ref t = get_AT_ref (c, DW_AT_type);
7856
7857 return t ? is_comdat_die (t) : 0;
7858 }
7859
7860 return is_type_die (c);
7861 }
7862
7863 /* Returns true iff C is a compile-unit DIE. */
7864
7865 static inline bool
7866 is_cu_die (dw_die_ref c)
7867 {
7868 return c && (c->die_tag == DW_TAG_compile_unit
7869 || c->die_tag == DW_TAG_skeleton_unit);
7870 }
7871
7872 /* Returns true iff C is a unit DIE of some sort. */
7873
7874 static inline bool
7875 is_unit_die (dw_die_ref c)
7876 {
7877 return c && (c->die_tag == DW_TAG_compile_unit
7878 || c->die_tag == DW_TAG_partial_unit
7879 || c->die_tag == DW_TAG_type_unit
7880 || c->die_tag == DW_TAG_skeleton_unit);
7881 }
7882
7883 /* Returns true iff C is a namespace DIE. */
7884
7885 static inline bool
7886 is_namespace_die (dw_die_ref c)
7887 {
7888 return c && c->die_tag == DW_TAG_namespace;
7889 }
7890
7891 /* Returns true iff C is a class or structure DIE. */
7892
7893 static inline bool
7894 is_class_die (dw_die_ref c)
7895 {
7896 return c && (c->die_tag == DW_TAG_class_type
7897 || c->die_tag == DW_TAG_structure_type);
7898 }
7899
7900 /* Return non-zero if this DIE is a template parameter. */
7901
7902 static inline bool
7903 is_template_parameter (dw_die_ref die)
7904 {
7905 switch (die->die_tag)
7906 {
7907 case DW_TAG_template_type_param:
7908 case DW_TAG_template_value_param:
7909 case DW_TAG_GNU_template_template_param:
7910 case DW_TAG_GNU_template_parameter_pack:
7911 return true;
7912 default:
7913 return false;
7914 }
7915 }
7916
7917 /* Return non-zero if this DIE represents a template instantiation. */
7918
7919 static inline bool
7920 is_template_instantiation (dw_die_ref die)
7921 {
7922 dw_die_ref c;
7923
7924 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7925 return false;
7926 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7927 return false;
7928 }
7929
7930 static char *
7931 gen_internal_sym (const char *prefix)
7932 {
7933 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7934
7935 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7936 return xstrdup (buf);
7937 }
7938
7939 /* Return non-zero if this DIE is a declaration. */
7940
7941 static int
7942 is_declaration_die (dw_die_ref die)
7943 {
7944 dw_attr_node *a;
7945 unsigned ix;
7946
7947 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7948 if (a->dw_attr == DW_AT_declaration)
7949 return 1;
7950
7951 return 0;
7952 }
7953
7954 /* Return non-zero if this DIE is nested inside a subprogram. */
7955
7956 static int
7957 is_nested_in_subprogram (dw_die_ref die)
7958 {
7959 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7960
7961 if (decl == NULL)
7962 decl = die;
7963 return local_scope_p (decl);
7964 }
7965
7966 /* Return non-zero if this DIE contains a defining declaration of a
7967 subprogram. */
7968
7969 static int
7970 contains_subprogram_definition (dw_die_ref die)
7971 {
7972 dw_die_ref c;
7973
7974 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
7975 return 1;
7976 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
7977 return 0;
7978 }
7979
7980 /* Return non-zero if this is a type DIE that should be moved to a
7981 COMDAT .debug_types section or .debug_info section with DW_UT_*type
7982 unit type. */
7983
7984 static int
7985 should_move_die_to_comdat (dw_die_ref die)
7986 {
7987 switch (die->die_tag)
7988 {
7989 case DW_TAG_class_type:
7990 case DW_TAG_structure_type:
7991 case DW_TAG_enumeration_type:
7992 case DW_TAG_union_type:
7993 /* Don't move declarations, inlined instances, types nested in a
7994 subprogram, or types that contain subprogram definitions. */
7995 if (is_declaration_die (die)
7996 || get_AT (die, DW_AT_abstract_origin)
7997 || is_nested_in_subprogram (die)
7998 || contains_subprogram_definition (die))
7999 return 0;
8000 return 1;
8001 case DW_TAG_array_type:
8002 case DW_TAG_interface_type:
8003 case DW_TAG_pointer_type:
8004 case DW_TAG_reference_type:
8005 case DW_TAG_rvalue_reference_type:
8006 case DW_TAG_string_type:
8007 case DW_TAG_subroutine_type:
8008 case DW_TAG_ptr_to_member_type:
8009 case DW_TAG_set_type:
8010 case DW_TAG_subrange_type:
8011 case DW_TAG_base_type:
8012 case DW_TAG_const_type:
8013 case DW_TAG_file_type:
8014 case DW_TAG_packed_type:
8015 case DW_TAG_volatile_type:
8016 case DW_TAG_typedef:
8017 default:
8018 return 0;
8019 }
8020 }
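
/* Editorial example: a complete "struct point { int x, y; };" defined at
   file or namespace scope satisfies the checks above and gets its own
   type unit, whereas a mere forward declaration of it, or a struct type
   defined locally inside a function body, stays in the main compilation
   unit.  */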
8021
8022 /* Make a clone of DIE. */
8023
8024 static dw_die_ref
8025 clone_die (dw_die_ref die)
8026 {
8027 dw_die_ref clone = new_die_raw (die->die_tag);
8028 dw_attr_node *a;
8029 unsigned ix;
8030
8031 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8032 add_dwarf_attr (clone, a);
8033
8034 return clone;
8035 }
8036
8037 /* Make a clone of the tree rooted at DIE. */
8038
8039 static dw_die_ref
8040 clone_tree (dw_die_ref die)
8041 {
8042 dw_die_ref c;
8043 dw_die_ref clone = clone_die (die);
8044
8045 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8046
8047 return clone;
8048 }
8049
8050 /* Make a clone of DIE as a declaration. */
8051
8052 static dw_die_ref
8053 clone_as_declaration (dw_die_ref die)
8054 {
8055 dw_die_ref clone;
8056 dw_die_ref decl;
8057 dw_attr_node *a;
8058 unsigned ix;
8059
8060 /* If the DIE is already a declaration, just clone it. */
8061 if (is_declaration_die (die))
8062 return clone_die (die);
8063
8064 /* If the DIE is a specification, just clone its declaration DIE. */
8065 decl = get_AT_ref (die, DW_AT_specification);
8066 if (decl != NULL)
8067 {
8068 clone = clone_die (decl);
8069 if (die->comdat_type_p)
8070 add_AT_die_ref (clone, DW_AT_signature, die);
8071 return clone;
8072 }
8073
8074 clone = new_die_raw (die->die_tag);
8075
8076 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8077 {
8078 /* We don't want to copy over all attributes.
8079 For example we don't want DW_AT_byte_size because otherwise we will no
8080 longer have a declaration and GDB will treat it as a definition. */
8081
8082 switch (a->dw_attr)
8083 {
8084 case DW_AT_abstract_origin:
8085 case DW_AT_artificial:
8086 case DW_AT_containing_type:
8087 case DW_AT_external:
8088 case DW_AT_name:
8089 case DW_AT_type:
8090 case DW_AT_virtuality:
8091 case DW_AT_linkage_name:
8092 case DW_AT_MIPS_linkage_name:
8093 add_dwarf_attr (clone, a);
8094 break;
8095 case DW_AT_byte_size:
8096 case DW_AT_alignment:
8097 default:
8098 break;
8099 }
8100 }
8101
8102 if (die->comdat_type_p)
8103 add_AT_die_ref (clone, DW_AT_signature, die);
8104
8105 add_AT_flag (clone, DW_AT_declaration, 1);
8106 return clone;
8107 }
8108
8109
8110 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8111
8112 struct decl_table_entry
8113 {
8114 dw_die_ref orig;
8115 dw_die_ref copy;
8116 };
8117
8118 /* Helpers to manipulate hash table of copied declarations. */
8119
8120 /* Hashtable helpers. */
8121
8122 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8123 {
8124 typedef die_struct *compare_type;
8125 static inline hashval_t hash (const decl_table_entry *);
8126 static inline bool equal (const decl_table_entry *, const die_struct *);
8127 };
8128
8129 inline hashval_t
8130 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8131 {
8132 return htab_hash_pointer (entry->orig);
8133 }
8134
8135 inline bool
8136 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8137 const die_struct *entry2)
8138 {
8139 return entry1->orig == entry2;
8140 }
8141
8142 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8143
8144 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8145 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8146 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8147 to check if the ancestor has already been copied into UNIT. */
8148
8149 static dw_die_ref
8150 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8151 decl_hash_type *decl_table)
8152 {
8153 dw_die_ref parent = die->die_parent;
8154 dw_die_ref new_parent = unit;
8155 dw_die_ref copy;
8156 decl_table_entry **slot = NULL;
8157 struct decl_table_entry *entry = NULL;
8158
8159 if (decl_table)
8160 {
8161 /* Check if the entry has already been copied to UNIT. */
8162 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8163 INSERT);
8164 if (*slot != HTAB_EMPTY_ENTRY)
8165 {
8166 entry = *slot;
8167 return entry->copy;
8168 }
8169
8170 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8171 entry = XCNEW (struct decl_table_entry);
8172 entry->orig = die;
8173 entry->copy = NULL;
8174 *slot = entry;
8175 }
8176
8177 if (parent != NULL)
8178 {
8179 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8180 if (spec != NULL)
8181 parent = spec;
8182 if (!is_unit_die (parent))
8183 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8184 }
8185
8186 copy = clone_as_declaration (die);
8187 add_child_die (new_parent, copy);
8188
8189 if (decl_table)
8190 {
8191 /* Record the pointer to the copy. */
8192 entry->copy = copy;
8193 }
8194
8195 return copy;
8196 }
8197 /* Copy the declaration context to the new type unit DIE. This includes
8198 any surrounding namespace or type declarations. If the DIE has an
8199 AT_specification attribute, it also includes attributes and children
8200 attached to the specification, and returns a pointer to the original
8201 parent of the declaration DIE. Returns NULL otherwise. */
8202
8203 static dw_die_ref
8204 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8205 {
8206 dw_die_ref decl;
8207 dw_die_ref new_decl;
8208 dw_die_ref orig_parent = NULL;
8209
8210 decl = get_AT_ref (die, DW_AT_specification);
8211 if (decl == NULL)
8212 decl = die;
8213 else
8214 {
8215 unsigned ix;
8216 dw_die_ref c;
8217 dw_attr_node *a;
8218
8219 /* The original DIE will be changed to a declaration, and must
8220 be moved to be a child of the original declaration DIE. */
8221 orig_parent = decl->die_parent;
8222
8223 /* Copy the type node pointer from the new DIE to the original
8224 declaration DIE so we can forward references later. */
8225 decl->comdat_type_p = true;
8226 decl->die_id.die_type_node = die->die_id.die_type_node;
8227
8228 remove_AT (die, DW_AT_specification);
8229
8230 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8231 {
8232 if (a->dw_attr != DW_AT_name
8233 && a->dw_attr != DW_AT_declaration
8234 && a->dw_attr != DW_AT_external)
8235 add_dwarf_attr (die, a);
8236 }
8237
8238 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8239 }
8240
8241 if (decl->die_parent != NULL
8242 && !is_unit_die (decl->die_parent))
8243 {
8244 new_decl = copy_ancestor_tree (unit, decl, NULL);
8245 if (new_decl != NULL)
8246 {
8247 remove_AT (new_decl, DW_AT_signature);
8248 add_AT_specification (die, new_decl);
8249 }
8250 }
8251
8252 return orig_parent;
8253 }
8254
8255 /* Generate the skeleton ancestor tree for the given NODE, then clone
8256 the DIE and add the clone into the tree. */
8257
8258 static void
8259 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8260 {
8261 if (node->new_die != NULL)
8262 return;
8263
8264 node->new_die = clone_as_declaration (node->old_die);
8265
8266 if (node->parent != NULL)
8267 {
8268 generate_skeleton_ancestor_tree (node->parent);
8269 add_child_die (node->parent->new_die, node->new_die);
8270 }
8271 }
8272
8273 /* Generate a skeleton tree of DIEs containing any declarations that are
8274 found in the original tree. We traverse the tree looking for declaration
8275 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8276
8277 static void
8278 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8279 {
8280 skeleton_chain_node node;
8281 dw_die_ref c;
8282 dw_die_ref first;
8283 dw_die_ref prev = NULL;
8284 dw_die_ref next = NULL;
8285
8286 node.parent = parent;
8287
8288 first = c = parent->old_die->die_child;
8289 if (c)
8290 next = c->die_sib;
8291 if (c) do {
8292 if (prev == NULL || prev->die_sib == c)
8293 prev = c;
8294 c = next;
8295 next = (c == first ? NULL : c->die_sib);
8296 node.old_die = c;
8297 node.new_die = NULL;
8298 if (is_declaration_die (c))
8299 {
8300 if (is_template_instantiation (c))
8301 {
8302 /* Instantiated templates do not need to be cloned into the
8303 type unit. Just move the DIE and its children back to
8304 the skeleton tree (in the main CU). */
8305 remove_child_with_prev (c, prev);
8306 add_child_die (parent->new_die, c);
8307 c = prev;
8308 }
8309 else if (c->comdat_type_p)
8310 {
8311 /* This is the skeleton left behind by an earlier
8312 break_out_comdat_types pass. Clone the existing DIE, but keep
8313 the children under the original (which is in the main CU). */
8314 dw_die_ref clone = clone_die (c);
8315
8316 replace_child (c, clone, prev);
8317 generate_skeleton_ancestor_tree (parent);
8318 add_child_die (parent->new_die, c);
8319 c = clone;
8320 continue;
8321 }
8322 else
8323 {
8324 /* Clone the existing DIE, move the original to the skeleton
8325 tree (which is in the main CU), and put the clone, with
8326 all the original's children, where the original came from
8327 (which is about to be moved to the type unit). */
8328 dw_die_ref clone = clone_die (c);
8329 move_all_children (c, clone);
8330
8331 /* If the original has a DW_AT_object_pointer attribute,
8332 it would now point to a child DIE just moved to the
8333 cloned tree, so we need to remove that attribute from
8334 the original. */
8335 remove_AT (c, DW_AT_object_pointer);
8336
8337 replace_child (c, clone, prev);
8338 generate_skeleton_ancestor_tree (parent);
8339 add_child_die (parent->new_die, c);
8340 node.old_die = clone;
8341 node.new_die = c;
8342 c = clone;
8343 }
8344 }
8345 generate_skeleton_bottom_up (&node);
8346 } while (next != NULL);
8347 }
8348
8349 /* Wrapper function for generate_skeleton_bottom_up. */
8350
8351 static dw_die_ref
8352 generate_skeleton (dw_die_ref die)
8353 {
8354 skeleton_chain_node node;
8355
8356 node.old_die = die;
8357 node.new_die = NULL;
8358 node.parent = NULL;
8359
8360 /* If this type definition is nested inside another type,
8361 and is not an instantiation of a template, always leave
8362 at least a declaration in its place. */
8363 if (die->die_parent != NULL
8364 && is_type_die (die->die_parent)
8365 && !is_template_instantiation (die))
8366 node.new_die = clone_as_declaration (die);
8367
8368 generate_skeleton_bottom_up (&node);
8369 return node.new_die;
8370 }
8371
8372 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8373 declaration. The original DIE is moved to a new compile unit so that
8374 existing references to it follow it to the new location. If any of the
8375 original DIE's descendants is a declaration, we need to replace the
8376 original DIE with a skeleton tree and move the declarations back into the
8377 skeleton tree. */
8378
8379 static dw_die_ref
8380 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8381 dw_die_ref prev)
8382 {
8383 dw_die_ref skeleton, orig_parent;
8384
8385 /* Copy the declaration context to the type unit DIE. If the returned
8386 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8387 that DIE. */
8388 orig_parent = copy_declaration_context (unit, child);
8389
8390 skeleton = generate_skeleton (child);
8391 if (skeleton == NULL)
8392 remove_child_with_prev (child, prev);
8393 else
8394 {
8395 skeleton->comdat_type_p = true;
8396 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8397
8398 /* If the original DIE was a specification, we need to put
8399 the skeleton under the parent DIE of the declaration.
8400 This leaves the original declaration in the tree, but
8401 it will be pruned later since there are no longer any
8402 references to it. */
8403 if (orig_parent != NULL)
8404 {
8405 remove_child_with_prev (child, prev);
8406 add_child_die (orig_parent, skeleton);
8407 }
8408 else
8409 replace_child (child, skeleton, prev);
8410 }
8411
8412 return skeleton;
8413 }
8414
8415 static void
8416 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8417 comdat_type_node *type_node,
8418 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8419
8420 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8421 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8422 DWARF procedure references in the DW_AT_location attribute. */
8423
8424 static dw_die_ref
8425 copy_dwarf_procedure (dw_die_ref die,
8426 comdat_type_node *type_node,
8427 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8428 {
8429 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8430
8431 /* DWARF procedures are not supposed to have children... */
8432 gcc_assert (die->die_child == NULL);
8433
8434 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8435 gcc_assert (vec_safe_length (die->die_attr) == 1
8436 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8437
8438 /* Do not copy DWARF procedures more than once. */
8439 bool existed;
8440 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8441 if (existed)
8442 return die_copy;
8443
8444 die_copy = clone_die (die);
8445 add_child_die (type_node->root_die, die_copy);
8446 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8447 return die_copy;
8448 }
8449
8450 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8451 procedures in DIE's attributes. */
8452
8453 static void
8454 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8455 comdat_type_node *type_node,
8456 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8457 {
8458 dw_attr_node *a;
8459 unsigned i;
8460
8461 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8462 {
8463 dw_loc_descr_ref loc;
8464
8465 if (a->dw_attr_val.val_class != dw_val_class_loc)
8466 continue;
8467
8468 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8469 {
8470 switch (loc->dw_loc_opc)
8471 {
8472 case DW_OP_call2:
8473 case DW_OP_call4:
8474 case DW_OP_call_ref:
8475 gcc_assert (loc->dw_loc_oprnd1.val_class
8476 == dw_val_class_die_ref);
8477 loc->dw_loc_oprnd1.v.val_die_ref.die
8478 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8479 type_node,
8480 copied_dwarf_procs);
8481
8482 default:
8483 break;
8484 }
8485 }
8486 }
8487 }
8488
8489 /* Copy DWARF procedures that are referenced by the DIE tree to TYPE_NODE and
8490 rewrite references to point to the copies.
8491 
8492 References are looked for in DIE's attributes and recursively in all of its
8493 children's attributes that are location descriptions. COPIED_DWARF_PROCS is a
8494 mapping from old DWARF procedures to their copies; it is used to avoid
8495 copying the same DWARF procedure twice under TYPE_NODE. */
8496
8497 static void
8498 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8499 comdat_type_node *type_node,
8500 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8501 {
8502 dw_die_ref c;
8503
8504 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8505 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8506 type_node,
8507 copied_dwarf_procs));
8508 }
8509
8510 /* Traverse the DIE and set up additional .debug_types or .debug_info
8511 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8512 section. */
8513
8514 static void
8515 break_out_comdat_types (dw_die_ref die)
8516 {
8517 dw_die_ref c;
8518 dw_die_ref first;
8519 dw_die_ref prev = NULL;
8520 dw_die_ref next = NULL;
8521 dw_die_ref unit = NULL;
8522
8523 first = c = die->die_child;
8524 if (c)
8525 next = c->die_sib;
8526 if (c) do {
8527 if (prev == NULL || prev->die_sib == c)
8528 prev = c;
8529 c = next;
8530 next = (c == first ? NULL : c->die_sib);
8531 if (should_move_die_to_comdat (c))
8532 {
8533 dw_die_ref replacement;
8534 comdat_type_node *type_node;
8535
8536 /* Break out nested types into their own type units. */
8537 break_out_comdat_types (c);
8538
8539 /* Create a new type unit DIE as the root for the new tree, and
8540 add it to the list of comdat types. */
8541 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8542 add_AT_unsigned (unit, DW_AT_language,
8543 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8544 type_node = ggc_cleared_alloc<comdat_type_node> ();
8545 type_node->root_die = unit;
8546 type_node->next = comdat_type_list;
8547 comdat_type_list = type_node;
8548
8549 /* Generate the type signature. */
8550 generate_type_signature (c, type_node);
8551
8552 /* Copy the declaration context, attributes, and children of the
8553 declaration into the new type unit DIE, then remove this DIE
8554 from the main CU (or replace it with a skeleton if necessary). */
8555 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8556 type_node->skeleton_die = replacement;
8557
8558 /* Add the DIE to the new compunit. */
8559 add_child_die (unit, c);
8560
8561 /* Types can reference DWARF procedures for type size or data location
8562 expressions. Calls in DWARF expressions cannot target procedures
8563 that are not in the same section. So we must copy DWARF procedures
8564 along with this type and then rewrite references to them. */
8565 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8566 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8567
8568 if (replacement != NULL)
8569 c = replacement;
8570 }
8571 else if (c->die_tag == DW_TAG_namespace
8572 || c->die_tag == DW_TAG_class_type
8573 || c->die_tag == DW_TAG_structure_type
8574 || c->die_tag == DW_TAG_union_type)
8575 {
8576 /* Look for nested types that can be broken out. */
8577 break_out_comdat_types (c);
8578 }
8579 } while (next != NULL);
8580 }
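
/* Editorial example: given

     struct Outer { struct Inner { int i; }; Inner member; };

   the recursion above first breaks Inner out into its own type unit and
   then Outer; the main compilation unit is left with skeleton
   declarations whose DW_AT_signature attributes refer to the new type
   units.  */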
8581
8582 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8583 Enter all the cloned children into the hash table decl_table. */
8584
8585 static dw_die_ref
8586 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8587 {
8588 dw_die_ref c;
8589 dw_die_ref clone;
8590 struct decl_table_entry *entry;
8591 decl_table_entry **slot;
8592
8593 if (die->die_tag == DW_TAG_subprogram)
8594 clone = clone_as_declaration (die);
8595 else
8596 clone = clone_die (die);
8597
8598 slot = decl_table->find_slot_with_hash (die,
8599 htab_hash_pointer (die), INSERT);
8600
8601 /* Assert that DIE isn't in the hash table yet. If it were already
8602 there, its ancestors would necessarily be there as well, and
8603 clone_tree_partial wouldn't have been called. */
8604 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8605
8606 entry = XCNEW (struct decl_table_entry);
8607 entry->orig = die;
8608 entry->copy = clone;
8609 *slot = entry;
8610
8611 if (die->die_tag != DW_TAG_subprogram)
8612 FOR_EACH_CHILD (die, c,
8613 add_child_die (clone, clone_tree_partial (c, decl_table)));
8614
8615 return clone;
8616 }
8617
8618 /* Walk the DIE and its children, looking for references to incomplete
8619 or trivial types that are unmarked (i.e., that are not in the current
8620 type_unit). */
8621
8622 static void
8623 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8624 {
8625 dw_die_ref c;
8626 dw_attr_node *a;
8627 unsigned ix;
8628
8629 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8630 {
8631 if (AT_class (a) == dw_val_class_die_ref)
8632 {
8633 dw_die_ref targ = AT_ref (a);
8634 decl_table_entry **slot;
8635 struct decl_table_entry *entry;
8636
8637 if (targ->die_mark != 0 || targ->comdat_type_p)
8638 continue;
8639
8640 slot = decl_table->find_slot_with_hash (targ,
8641 htab_hash_pointer (targ),
8642 INSERT);
8643
8644 if (*slot != HTAB_EMPTY_ENTRY)
8645 {
8646 /* TARG has already been copied, so we just need to
8647 modify the reference to point to the copy. */
8648 entry = *slot;
8649 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8650 }
8651 else
8652 {
8653 dw_die_ref parent = unit;
8654 dw_die_ref copy = clone_die (targ);
8655
8656 /* Record in DECL_TABLE that TARG has been copied.
8657 Need to do this now, before the recursive call,
8658 because DECL_TABLE may be expanded and SLOT
8659 would no longer be a valid pointer. */
8660 entry = XCNEW (struct decl_table_entry);
8661 entry->orig = targ;
8662 entry->copy = copy;
8663 *slot = entry;
8664
8665 /* If TARG is not a declaration DIE, we need to copy its
8666 children. */
8667 if (!is_declaration_die (targ))
8668 {
8669 FOR_EACH_CHILD (
8670 targ, c,
8671 add_child_die (copy,
8672 clone_tree_partial (c, decl_table)));
8673 }
8674
8675 /* Make sure the cloned tree is marked as part of the
8676 type unit. */
8677 mark_dies (copy);
8678
8679 /* If TARG has surrounding context, copy its ancestor tree
8680 into the new type unit. */
8681 if (targ->die_parent != NULL
8682 && !is_unit_die (targ->die_parent))
8683 parent = copy_ancestor_tree (unit, targ->die_parent,
8684 decl_table);
8685
8686 add_child_die (parent, copy);
8687 a->dw_attr_val.v.val_die_ref.die = copy;
8688
8689 /* Make sure the newly-copied DIE is walked. If it was
8690 installed in a previously-added context, it won't
8691 get visited otherwise. */
8692 if (parent != unit)
8693 {
8694 /* Find the highest point of the newly-added tree,
8695 mark each node along the way, and walk from there. */
8696 parent->die_mark = 1;
8697 while (parent->die_parent
8698 && parent->die_parent->die_mark == 0)
8699 {
8700 parent = parent->die_parent;
8701 parent->die_mark = 1;
8702 }
8703 copy_decls_walk (unit, parent, decl_table);
8704 }
8705 }
8706 }
8707 }
8708
8709 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8710 }
8711
8712 /* Copy declarations for "unworthy" types into the new comdat section.
8713 Incomplete types, modified types, and certain other types aren't broken
8714 out into comdat sections of their own, so they don't have a signature,
8715 and we need to copy the declaration into the same section so that we
8716 don't have an external reference. */
8717
8718 static void
8719 copy_decls_for_unworthy_types (dw_die_ref unit)
8720 {
8721 mark_dies (unit);
8722 decl_hash_type decl_table (10);
8723 copy_decls_walk (unit, unit, &decl_table);
8724 unmark_dies (unit);
8725 }
8726
8727 /* Traverse the DIE and add a sibling attribute if it may have the
8728 effect of speeding up access to siblings. To save some space,
8729 avoid generating sibling attributes for DIEs without children. */
8730
8731 static void
8732 add_sibling_attributes (dw_die_ref die)
8733 {
8734 dw_die_ref c;
8735
8736 if (! die->die_child)
8737 return;
8738
8739 if (die->die_parent && die != die->die_parent->die_child)
8740 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8741
8742 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8743 }
8744
8745 /* Output all location lists for the DIE and its children. */
8746
8747 static void
8748 output_location_lists (dw_die_ref die)
8749 {
8750 dw_die_ref c;
8751 dw_attr_node *a;
8752 unsigned ix;
8753
8754 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8755 if (AT_class (a) == dw_val_class_loc_list)
8756 output_loc_list (AT_loc_list (a));
8757
8758 FOR_EACH_CHILD (die, c, output_location_lists (c));
8759 }
8760
8761 /* During assign_location_list_indexes and output_loclists_offsets this is
8762 the current index; afterwards it is the number of assigned indexes (i.e.
8763 how large the .debug_loclists* offset table should be). */
8764 static unsigned int loc_list_idx;
8765
8766 /* Output all location list offsets for the DIE and its children. */
8767
8768 static void
8769 output_loclists_offsets (dw_die_ref die)
8770 {
8771 dw_die_ref c;
8772 dw_attr_node *a;
8773 unsigned ix;
8774
8775 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8776 if (AT_class (a) == dw_val_class_loc_list)
8777 {
8778 dw_loc_list_ref l = AT_loc_list (a);
8779 if (l->offset_emitted)
8780 continue;
8781 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8782 loc_section_label, NULL);
8783 gcc_assert (l->hash == loc_list_idx);
8784 loc_list_idx++;
8785 l->offset_emitted = true;
8786 }
8787
8788 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8789 }
8790
8791 /* Recursively set indexes of location lists. */
8792
8793 static void
8794 assign_location_list_indexes (dw_die_ref die)
8795 {
8796 dw_die_ref c;
8797 dw_attr_node *a;
8798 unsigned ix;
8799
8800 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8801 if (AT_class (a) == dw_val_class_loc_list)
8802 {
8803 dw_loc_list_ref list = AT_loc_list (a);
8804 if (!list->num_assigned)
8805 {
8806 list->num_assigned = true;
8807 list->hash = loc_list_idx++;
8808 }
8809 }
8810
8811 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8812 }
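
/* Editorial note: these indexes are what a DW_FORM_loclistx attribute
   value holds in DWARF 5; a consumer maps the index to a section offset
   through the .debug_loclists offset table whose entries
   output_loclists_offsets emits above.  */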
8813
8814 /* We want to limit the number of external references, because they are
8815 larger than local references: a relocation takes multiple words, and
8816 even a sig8 reference is always eight bytes, whereas a local reference
8817 can be as small as one byte (though GCC usually uses the 4-byte DW_FORM_ref4).
8818 So if we encounter multiple external references to the same type DIE, we
8819 make a local typedef stub for it and redirect all references there.
8820
8821 This is the element of the hash table for keeping track of these
8822 references. */
8823
8824 struct external_ref
8825 {
8826 dw_die_ref type;
8827 dw_die_ref stub;
8828 unsigned n_refs;
8829 };
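
/* Editorial note, rough arithmetic only: a DW_FORM_ref_addr to another CU
   costs offset-size bytes of data plus a relocation record in the object
   file, and a DW_FORM_ref_sig8 costs eight bytes, while a DW_FORM_ref4 to
   a local stub costs four bytes and no relocation.  Since a stub DIE is
   only on the order of ten bytes, redirecting repeated references through
   it quickly wins, which is why dwarf2_build_local_stub below only builds
   a stub once n_refs exceeds one.  */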
8830
8831 /* Hashtable helpers. */
8832
8833 struct external_ref_hasher : free_ptr_hash <external_ref>
8834 {
8835 static inline hashval_t hash (const external_ref *);
8836 static inline bool equal (const external_ref *, const external_ref *);
8837 };
8838
8839 inline hashval_t
8840 external_ref_hasher::hash (const external_ref *r)
8841 {
8842 dw_die_ref die = r->type;
8843 hashval_t h = 0;
8844
8845 /* We can't use the address of the DIE for hashing, because
8846 that will make the order of the stub DIEs non-deterministic. */
8847 if (! die->comdat_type_p)
8848 /* We have a symbol; use it to compute a hash. */
8849 h = htab_hash_string (die->die_id.die_symbol);
8850 else
8851 {
8852 /* We have a type signature; use a subset of the bits as the hash.
8853 The 8-byte signature is at least as large as hashval_t. */
8854 comdat_type_node *type_node = die->die_id.die_type_node;
8855 memcpy (&h, type_node->signature, sizeof (h));
8856 }
8857 return h;
8858 }
8859
8860 inline bool
8861 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8862 {
8863 return r1->type == r2->type;
8864 }
8865
8866 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8867
8868 /* Return a pointer to the external_ref for references to DIE. */
8869
8870 static struct external_ref *
8871 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8872 {
8873 struct external_ref ref, *ref_p;
8874 external_ref **slot;
8875
8876 ref.type = die;
8877 slot = map->find_slot (&ref, INSERT);
8878 if (*slot != HTAB_EMPTY_ENTRY)
8879 return *slot;
8880
8881 ref_p = XCNEW (struct external_ref);
8882 ref_p->type = die;
8883 *slot = ref_p;
8884 return ref_p;
8885 }
8886
8887 /* Subroutine of optimize_external_refs, below.
8888
8889 If we see a type skeleton, record it as our stub. If we see external
8890 references, remember how many we've seen. */
8891
8892 static void
8893 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8894 {
8895 dw_die_ref c;
8896 dw_attr_node *a;
8897 unsigned ix;
8898 struct external_ref *ref_p;
8899
8900 if (is_type_die (die)
8901 && (c = get_AT_ref (die, DW_AT_signature)))
8902 {
8903 /* This is a local skeleton; use it for local references. */
8904 ref_p = lookup_external_ref (map, c);
8905 ref_p->stub = die;
8906 }
8907
8908 /* Scan the DIE references, and remember any that refer to DIEs from
8909 other CUs (i.e. those which are not marked). */
8910 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8911 if (AT_class (a) == dw_val_class_die_ref
8912 && (c = AT_ref (a))->die_mark == 0
8913 && is_type_die (c))
8914 {
8915 ref_p = lookup_external_ref (map, c);
8916 ref_p->n_refs++;
8917 }
8918
8919 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8920 }
8921
8922 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8923 points to an external_ref, DATA is the CU we're processing. If we don't
8924 already have a local stub, and we have multiple refs, build a stub. */
8925
8926 int
8927 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8928 {
8929 struct external_ref *ref_p = *slot;
8930
8931 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8932 {
8933 /* We have multiple references to this type, so build a small stub.
8934 Both of these forms are a bit dodgy from the perspective of the
8935 DWARF standard, since technically they should have names. */
8936 dw_die_ref cu = data;
8937 dw_die_ref type = ref_p->type;
8938 dw_die_ref stub = NULL;
8939
8940 if (type->comdat_type_p)
8941 {
8942 /* If we refer to this type via sig8, use AT_signature. */
8943 stub = new_die (type->die_tag, cu, NULL_TREE);
8944 add_AT_die_ref (stub, DW_AT_signature, type);
8945 }
8946 else
8947 {
8948 /* Otherwise, use a typedef with no name. */
8949 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8950 add_AT_die_ref (stub, DW_AT_type, type);
8951 }
8952
8953 stub->die_mark++;
8954 ref_p->stub = stub;
8955 }
8956 return 1;
8957 }
8958
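/* As a rough illustration of the saving (exact byte counts depend on
   DWARF_OFFSET_SIZE and the abbreviation encoding): each external
   DW_FORM_ref_addr typically costs DWARF_OFFSET_SIZE bytes plus a
   relocation, and each DW_FORM_ref_sig8 costs eight bytes, while a
   reference to the local stub built above is a plain intra-CU offset
   (DW_FORM_ref, usually four bytes, no relocation).  The stub itself is
   paid for only once, so with two or more references it quickly pays off.  */
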
8959 /* DIE is a unit; look through all the DIE references to see if there are
8960 any external references to types, and if so, create local stubs for
8961 them which will be applied in build_abbrev_table. This is useful because
8962 references to local DIEs are smaller. */
8963
8964 static external_ref_hash_type *
8965 optimize_external_refs (dw_die_ref die)
8966 {
8967 external_ref_hash_type *map = new external_ref_hash_type (10);
8968 optimize_external_refs_1 (die, map);
8969 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
8970 return map;
8971 }
8972
8973 /* The following 4 variables are temporaries that are computed only during the
8974 build_abbrev_table call and used and released during the following
8975 optimize_abbrev_table call. */
8976
8977 /* First abbrev_id that can be optimized based on usage. */
8978 static unsigned int abbrev_opt_start;
8979
8980 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
8981 abbrev_id smaller than this, because they must be already sized
8982 during build_abbrev_table). */
8983 static unsigned int abbrev_opt_base_type_end;
8984
8985 /* Vector of usage counts during build_abbrev_table. Indexed by
8986 abbrev_id - abbrev_opt_start. */
8987 static vec<unsigned int> abbrev_usage_count;
8988
8989 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
8990 static vec<dw_die_ref> sorted_abbrev_dies;
8991
8992 /* The format of each DIE (and its attribute value pairs) is encoded in an
8993 abbreviation table. This routine builds the abbreviation table and assigns
8994 a unique abbreviation id for each abbreviation entry. The children of each
8995 die are visited recursively. */
8996
8997 static void
8998 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
8999 {
9000 unsigned int abbrev_id = 0;
9001 dw_die_ref c;
9002 dw_attr_node *a;
9003 unsigned ix;
9004 dw_die_ref abbrev;
9005
9006 /* Scan the DIE references, and replace any that refer to
9007 DIEs from other CUs (i.e. those which are not marked) with
9008 the local stubs we built in optimize_external_refs. */
9009 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9010 if (AT_class (a) == dw_val_class_die_ref
9011 && (c = AT_ref (a))->die_mark == 0)
9012 {
9013 struct external_ref *ref_p;
9014 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9015
9016 ref_p = lookup_external_ref (extern_map, c);
9017 if (ref_p->stub && ref_p->stub != die)
9018 change_AT_die_ref (a, ref_p->stub);
9019 else
9020 /* We aren't changing this reference, so mark it external. */
9021 set_AT_ref_external (a, 1);
9022 }
9023
9024 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9025 {
9026 dw_attr_node *die_a, *abbrev_a;
9027 unsigned ix;
9028 bool ok = true;
9029
9030 if (abbrev_id == 0)
9031 continue;
9032 if (abbrev->die_tag != die->die_tag)
9033 continue;
9034 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9035 continue;
9036
9037 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9038 continue;
9039
9040 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9041 {
9042 abbrev_a = &(*abbrev->die_attr)[ix];
9043 if ((abbrev_a->dw_attr != die_a->dw_attr)
9044 || (value_format (abbrev_a) != value_format (die_a)))
9045 {
9046 ok = false;
9047 break;
9048 }
9049 }
9050 if (ok)
9051 break;
9052 }
9053
9054 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9055 {
9056 vec_safe_push (abbrev_die_table, die);
9057 if (abbrev_opt_start)
9058 abbrev_usage_count.safe_push (0);
9059 }
9060 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9061 {
9062 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9063 sorted_abbrev_dies.safe_push (die);
9064 }
9065
9066 die->die_abbrev = abbrev_id;
9067 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9068 }
9069
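/* For example, two DW_TAG_variable DIEs that both carry DW_AT_name as
   DW_FORM_strp and DW_AT_type as DW_FORM_ref share a single abbreviation
   entry, so each DIE body repeats only the uleb128 abbrev code followed by
   the attribute values; a DIE whose DW_AT_name happens to use a different
   form (say DW_FORM_string) gets a separate entry.  */
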
9070 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9071 by die_abbrev's usage count, from the most commonly used
9072 abbreviation to the least. */
9073
9074 static int
9075 die_abbrev_cmp (const void *p1, const void *p2)
9076 {
9077 dw_die_ref die1 = *(const dw_die_ref *) p1;
9078 dw_die_ref die2 = *(const dw_die_ref *) p2;
9079
9080 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9081 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9082
9083 if (die1->die_abbrev >= abbrev_opt_base_type_end
9084 && die2->die_abbrev >= abbrev_opt_base_type_end)
9085 {
9086 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9087 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9088 return -1;
9089 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9090 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9091 return 1;
9092 }
9093
9094 /* Stabilize the sort. */
9095 if (die1->die_abbrev < die2->die_abbrev)
9096 return -1;
9097 if (die1->die_abbrev > die2->die_abbrev)
9098 return 1;
9099
9100 return 0;
9101 }
9102
9103 /* Convert dw_val_class_const and dw_val_class_unsigned_const class attributes
9104 of the DIEs between sorted_abbrev_dies[first_id] and sorted_abbrev_dies[end - 1]
9105 into dw_val_class_const_implicit or
9106 dw_val_class_unsigned_const_implicit. */
9107
9108 static void
9109 optimize_implicit_const (unsigned int first_id, unsigned int end,
9110 vec<bool> &implicit_consts)
9111 {
9112 /* It never makes sense if there is just one DIE using the abbreviation. */
9113 if (end < first_id + 2)
9114 return;
9115
9116 dw_attr_node *a;
9117 unsigned ix, i;
9118 dw_die_ref die = sorted_abbrev_dies[first_id];
9119 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9120 if (implicit_consts[ix])
9121 {
9122 enum dw_val_class new_class = dw_val_class_none;
9123 switch (AT_class (a))
9124 {
9125 case dw_val_class_unsigned_const:
9126 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9127 continue;
9128
9129 /* The .debug_abbrev section will grow by
9130 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9131 in all the DIEs using that abbreviation. */
9132 if (constant_size (AT_unsigned (a)) * (end - first_id)
9133 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9134 continue;
9135
9136 new_class = dw_val_class_unsigned_const_implicit;
9137 break;
9138
9139 case dw_val_class_const:
9140 new_class = dw_val_class_const_implicit;
9141 break;
9142
9143 case dw_val_class_file:
9144 new_class = dw_val_class_file_implicit;
9145 break;
9146
9147 default:
9148 continue;
9149 }
9150 for (i = first_id; i < end; i++)
9151 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9152 = new_class;
9153 }
9154 }
9155
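/* A rough worked example of the saving: if, say, 100 DIEs share one
   abbreviation and all of them have DW_AT_decl_file 1, turning that
   attribute into DW_FORM_implicit_const moves the value into .debug_abbrev
   as a single sleb128 (one byte here) and drops roughly 100 bytes of
   per-DIE data from .debug_info.  The guards above skip the transformation
   when the abbreviation is used only once or when the abbrev-side sleb128
   would cost at least as much as the DIE-side constants it replaces.  */
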
9156 /* Attempt to optimize the abbreviation table, starting at abbreviation
9157 abbrev_opt_start. */
9158
9159 static void
9160 optimize_abbrev_table (void)
9161 {
9162 if (abbrev_opt_start
9163 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9164 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9165 {
9166 auto_vec<bool, 32> implicit_consts;
9167 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9168
9169 unsigned int abbrev_id = abbrev_opt_start - 1;
9170 unsigned int first_id = ~0U;
9171 unsigned int last_abbrev_id = 0;
9172 unsigned int i;
9173 dw_die_ref die;
9174 if (abbrev_opt_base_type_end > abbrev_opt_start)
9175 abbrev_id = abbrev_opt_base_type_end - 1;
9176 /* Reassign abbreviation ids from abbrev_opt_start above, so that
9177 most commonly used abbreviations come first. */
9178 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9179 {
9180 dw_attr_node *a;
9181 unsigned ix;
9182
9183 /* If calc_base_type_die_sizes has been called, the CU and
9184 base types after it can't be optimized, because we've already
9185 calculated their DIE offsets. We've sorted them first. */
9186 if (die->die_abbrev < abbrev_opt_base_type_end)
9187 continue;
9188 if (die->die_abbrev != last_abbrev_id)
9189 {
9190 last_abbrev_id = die->die_abbrev;
9191 if (dwarf_version >= 5 && first_id != ~0U)
9192 optimize_implicit_const (first_id, i, implicit_consts);
9193 abbrev_id++;
9194 (*abbrev_die_table)[abbrev_id] = die;
9195 if (dwarf_version >= 5)
9196 {
9197 first_id = i;
9198 implicit_consts.truncate (0);
9199
9200 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9201 switch (AT_class (a))
9202 {
9203 case dw_val_class_const:
9204 case dw_val_class_unsigned_const:
9205 case dw_val_class_file:
9206 implicit_consts.safe_push (true);
9207 break;
9208 default:
9209 implicit_consts.safe_push (false);
9210 break;
9211 }
9212 }
9213 }
9214 else if (dwarf_version >= 5)
9215 {
9216 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9217 if (!implicit_consts[ix])
9218 continue;
9219 else
9220 {
9221 dw_attr_node *other_a
9222 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9223 if (!dw_val_equal_p (&a->dw_attr_val,
9224 &other_a->dw_attr_val))
9225 implicit_consts[ix] = false;
9226 }
9227 }
9228 die->die_abbrev = abbrev_id;
9229 }
9230 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9231 if (dwarf_version >= 5 && first_id != ~0U)
9232 optimize_implicit_const (first_id, i, implicit_consts);
9233 }
9234
9235 abbrev_opt_start = 0;
9236 abbrev_opt_base_type_end = 0;
9237 abbrev_usage_count.release ();
9238 sorted_abbrev_dies.release ();
9239 }
9240 \f
9241 /* Return the power-of-two number of bytes necessary to represent VALUE. */
9242
9243 static int
9244 constant_size (unsigned HOST_WIDE_INT value)
9245 {
9246 int log;
9247
9248 if (value == 0)
9249 log = 0;
9250 else
9251 log = floor_log2 (value);
9252
9253 log = log / 8;
9254 log = 1 << (floor_log2 (log) + 1);
9255
9256 return log;
9257 }
9258
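/* In other words: values 0 through 0xff need one byte, up to 0xffff two
   bytes, up to 0xffffffff four bytes, and anything larger eight bytes; the
   result is always 1, 2, 4 or 8 so it maps directly onto DW_FORM_data1
   through DW_FORM_data8 (see value_format below).  */
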
9259 /* Return the size of a DIE as it is represented in the
9260 .debug_info section. */
9261
9262 static unsigned long
9263 size_of_die (dw_die_ref die)
9264 {
9265 unsigned long size = 0;
9266 dw_attr_node *a;
9267 unsigned ix;
9268 enum dwarf_form form;
9269
9270 size += size_of_uleb128 (die->die_abbrev);
9271 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9272 {
9273 switch (AT_class (a))
9274 {
9275 case dw_val_class_addr:
9276 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9277 {
9278 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9279 size += size_of_uleb128 (AT_index (a));
9280 }
9281 else
9282 size += DWARF2_ADDR_SIZE;
9283 break;
9284 case dw_val_class_offset:
9285 size += DWARF_OFFSET_SIZE;
9286 break;
9287 case dw_val_class_loc:
9288 {
9289 unsigned long lsize = size_of_locs (AT_loc (a));
9290
9291 /* Block length. */
9292 if (dwarf_version >= 4)
9293 size += size_of_uleb128 (lsize);
9294 else
9295 size += constant_size (lsize);
9296 size += lsize;
9297 }
9298 break;
9299 case dw_val_class_loc_list:
9300 case dw_val_class_view_list:
9301 if (dwarf_split_debug_info && dwarf_version >= 5)
9302 {
9303 gcc_assert (AT_loc_list (a)->num_assigned);
9304 size += size_of_uleb128 (AT_loc_list (a)->hash);
9305 }
9306 else
9307 size += DWARF_OFFSET_SIZE;
9308 break;
9309 case dw_val_class_range_list:
9310 if (value_format (a) == DW_FORM_rnglistx)
9311 {
9312 gcc_assert (rnglist_idx);
9313 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9314 size += size_of_uleb128 (r->idx);
9315 }
9316 else
9317 size += DWARF_OFFSET_SIZE;
9318 break;
9319 case dw_val_class_const:
9320 size += size_of_sleb128 (AT_int (a));
9321 break;
9322 case dw_val_class_unsigned_const:
9323 {
9324 int csize = constant_size (AT_unsigned (a));
9325 if (dwarf_version == 3
9326 && a->dw_attr == DW_AT_data_member_location
9327 && csize >= 4)
9328 size += size_of_uleb128 (AT_unsigned (a));
9329 else
9330 size += csize;
9331 }
9332 break;
9333 case dw_val_class_symview:
9334 if (symview_upper_bound <= 0xff)
9335 size += 1;
9336 else if (symview_upper_bound <= 0xffff)
9337 size += 2;
9338 else if (symview_upper_bound <= 0xffffffff)
9339 size += 4;
9340 else
9341 size += 8;
9342 break;
9343 case dw_val_class_const_implicit:
9344 case dw_val_class_unsigned_const_implicit:
9345 case dw_val_class_file_implicit:
9346 /* These occupy no size in the DIE, just an extra sleb128 in
9347 .debug_abbrev. */
9348 break;
9349 case dw_val_class_const_double:
9350 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9351 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9352 size++; /* block */
9353 break;
9354 case dw_val_class_wide_int:
9355 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9356 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9357 if (get_full_len (*a->dw_attr_val.v.val_wide)
9358 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9359 size++; /* block */
9360 break;
9361 case dw_val_class_vec:
9362 size += constant_size (a->dw_attr_val.v.val_vec.length
9363 * a->dw_attr_val.v.val_vec.elt_size)
9364 + a->dw_attr_val.v.val_vec.length
9365 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9366 break;
9367 case dw_val_class_flag:
9368 if (dwarf_version >= 4)
9369 /* Currently all add_AT_flag calls pass in 1 as last argument,
9370 so DW_FORM_flag_present can be used. If that ever changes,
9371 we'll need to use DW_FORM_flag and have some optimization
9372 in build_abbrev_table that will change those to
9373 DW_FORM_flag_present if it is set to 1 in all DIEs using
9374 the same abbrev entry. */
9375 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9376 else
9377 size += 1;
9378 break;
9379 case dw_val_class_die_ref:
9380 if (AT_ref_external (a))
9381 {
9382 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9383 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9384 is sized by target address length, whereas in DWARF3
9385 it's always sized as an offset. */
9386 if (use_debug_types)
9387 size += DWARF_TYPE_SIGNATURE_SIZE;
9388 else if (dwarf_version == 2)
9389 size += DWARF2_ADDR_SIZE;
9390 else
9391 size += DWARF_OFFSET_SIZE;
9392 }
9393 else
9394 size += DWARF_OFFSET_SIZE;
9395 break;
9396 case dw_val_class_fde_ref:
9397 size += DWARF_OFFSET_SIZE;
9398 break;
9399 case dw_val_class_lbl_id:
9400 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9401 {
9402 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9403 size += size_of_uleb128 (AT_index (a));
9404 }
9405 else
9406 size += DWARF2_ADDR_SIZE;
9407 break;
9408 case dw_val_class_lineptr:
9409 case dw_val_class_macptr:
9410 case dw_val_class_loclistsptr:
9411 size += DWARF_OFFSET_SIZE;
9412 break;
9413 case dw_val_class_str:
9414 form = AT_string_form (a);
9415 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9416 size += DWARF_OFFSET_SIZE;
9417 else if (form == DW_FORM_GNU_str_index)
9418 size += size_of_uleb128 (AT_index (a));
9419 else
9420 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9421 break;
9422 case dw_val_class_file:
9423 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9424 break;
9425 case dw_val_class_data8:
9426 size += 8;
9427 break;
9428 case dw_val_class_vms_delta:
9429 size += DWARF_OFFSET_SIZE;
9430 break;
9431 case dw_val_class_high_pc:
9432 size += DWARF2_ADDR_SIZE;
9433 break;
9434 case dw_val_class_discr_value:
9435 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9436 break;
9437 case dw_val_class_discr_list:
9438 {
9439 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9440
9441 /* This is a block, so we have the block length and then its
9442 data. */
9443 size += constant_size (block_size) + block_size;
9444 }
9445 break;
9446 default:
9447 gcc_unreachable ();
9448 }
9449 }
9450
9451 return size;
9452 }
9453
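/* Note that the size computed here needs to match, byte for byte, what
   output_die and value_format later emit for the same DIE: calc_die_sizes
   below turns these sizes into die_offset values, and intra-CU references
   as well as the compilation unit's length are derived from them.  */
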
9454 /* Size the debugging information associated with a given DIE. Visits the
9455 DIE's children recursively. Updates the global variable next_die_offset
9456 each time through. Uses the current value of next_die_offset to update the
9457 die_offset field in each DIE. */
9458
9459 static void
9460 calc_die_sizes (dw_die_ref die)
9461 {
9462 dw_die_ref c;
9463
9464 gcc_assert (die->die_offset == 0
9465 || (unsigned long int) die->die_offset == next_die_offset);
9466 die->die_offset = next_die_offset;
9467 next_die_offset += size_of_die (die);
9468
9469 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9470
9471 if (die->die_child != NULL)
9472 /* Count the null byte used to terminate sibling lists. */
9473 next_die_offset += 1;
9474 }
9475
9476 /* Size just the base type children at the start of the CU.
9477 This is needed because build_abbrev_table needs to size locs,
9478 and sizing of type-based stack ops needs to know die_offset
9479 values for the base types. */
9480
9481 static void
9482 calc_base_type_die_sizes (void)
9483 {
9484 unsigned long die_offset = (dwarf_split_debug_info
9485 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9486 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9487 unsigned int i;
9488 dw_die_ref base_type;
9489 #if ENABLE_ASSERT_CHECKING
9490 dw_die_ref prev = comp_unit_die ()->die_child;
9491 #endif
9492
9493 die_offset += size_of_die (comp_unit_die ());
9494 for (i = 0; base_types.iterate (i, &base_type); i++)
9495 {
9496 #if ENABLE_ASSERT_CHECKING
9497 gcc_assert (base_type->die_offset == 0
9498 && prev->die_sib == base_type
9499 && base_type->die_child == NULL
9500 && base_type->die_abbrev);
9501 prev = base_type;
9502 #endif
9503 if (abbrev_opt_start
9504 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9505 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9506 base_type->die_offset = die_offset;
9507 die_offset += size_of_die (base_type);
9508 }
9509 }
9510
9511 /* Set the marks for a die and its children. We do this so
9512 that we know whether or not a reference needs to use FORM_ref_addr; only
9513 DIEs in the same CU will be marked. We used to clear out the offset
9514 and use that as the flag, but ran into ordering problems. */
9515
9516 static void
9517 mark_dies (dw_die_ref die)
9518 {
9519 dw_die_ref c;
9520
9521 gcc_assert (!die->die_mark);
9522
9523 die->die_mark = 1;
9524 FOR_EACH_CHILD (die, c, mark_dies (c));
9525 }
9526
9527 /* Clear the marks for a die and its children. */
9528
9529 static void
9530 unmark_dies (dw_die_ref die)
9531 {
9532 dw_die_ref c;
9533
9534 if (! use_debug_types)
9535 gcc_assert (die->die_mark);
9536
9537 die->die_mark = 0;
9538 FOR_EACH_CHILD (die, c, unmark_dies (c));
9539 }
9540
9541 /* Clear the marks for a die, its children and referred dies. */
9542
9543 static void
9544 unmark_all_dies (dw_die_ref die)
9545 {
9546 dw_die_ref c;
9547 dw_attr_node *a;
9548 unsigned ix;
9549
9550 if (!die->die_mark)
9551 return;
9552 die->die_mark = 0;
9553
9554 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9555
9556 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9557 if (AT_class (a) == dw_val_class_die_ref)
9558 unmark_all_dies (AT_ref (a));
9559 }
9560
9561 /* Calculate if the entry should appear in the final output file. It may be
9562 from a pruned type. */
9563
9564 static bool
9565 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9566 {
9567 /* By limiting gnu pubnames to definitions only, gold can generate a
9568 gdb index without entries for declarations, which don't include
9569 enough information to be useful. */
9570 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9571 return false;
9572
9573 if (table == pubname_table)
9574 {
9575 /* Enumerator names are part of the pubname table, but the
9576 parent DW_TAG_enumeration_type die may have been pruned.
9577 Don't output them if that is the case. */
9578 if (p->die->die_tag == DW_TAG_enumerator
9579 && (p->die->die_parent == NULL
9580 || !p->die->die_parent->die_perennial_p))
9581 return false;
9582
9583 /* Everything else in the pubname table is included. */
9584 return true;
9585 }
9586
9587 /* The pubtypes table shouldn't include types that have been
9588 pruned. */
9589 return (p->die->die_offset != 0
9590 || !flag_eliminate_unused_debug_types);
9591 }
9592
9593 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9594 generated for the compilation unit. */
9595
9596 static unsigned long
9597 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9598 {
9599 unsigned long size;
9600 unsigned i;
9601 pubname_entry *p;
9602 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9603
9604 size = DWARF_PUBNAMES_HEADER_SIZE;
9605 FOR_EACH_VEC_ELT (*names, i, p)
9606 if (include_pubname_in_output (names, p))
9607 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9608
9609 size += DWARF_OFFSET_SIZE;
9610 return size;
9611 }
9612
9613 /* Return the size of the information in the .debug_aranges section. */
9614
9615 static unsigned long
9616 size_of_aranges (void)
9617 {
9618 unsigned long size;
9619
9620 size = DWARF_ARANGES_HEADER_SIZE;
9621
9622 /* Count the address/length pair for this compilation unit. */
9623 if (text_section_used)
9624 size += 2 * DWARF2_ADDR_SIZE;
9625 if (cold_text_section_used)
9626 size += 2 * DWARF2_ADDR_SIZE;
9627 if (have_multiple_function_sections)
9628 {
9629 unsigned fde_idx;
9630 dw_fde_ref fde;
9631
9632 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9633 {
9634 if (DECL_IGNORED_P (fde->decl))
9635 continue;
9636 if (!fde->in_std_section)
9637 size += 2 * DWARF2_ADDR_SIZE;
9638 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9639 size += 2 * DWARF2_ADDR_SIZE;
9640 }
9641 }
9642
9643 /* Count the two zero words used to terminate the address range table. */
9644 size += 2 * DWARF2_ADDR_SIZE;
9645 return size;
9646 }
9647 \f
9648 /* Select the encoding of an attribute value. */
9649
9650 static enum dwarf_form
9651 value_format (dw_attr_node *a)
9652 {
9653 switch (AT_class (a))
9654 {
9655 case dw_val_class_addr:
9656 /* Only very few attributes allow DW_FORM_addr. */
9657 switch (a->dw_attr)
9658 {
9659 case DW_AT_low_pc:
9660 case DW_AT_high_pc:
9661 case DW_AT_entry_pc:
9662 case DW_AT_trampoline:
9663 return (AT_index (a) == NOT_INDEXED
9664 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
9665 default:
9666 break;
9667 }
9668 switch (DWARF2_ADDR_SIZE)
9669 {
9670 case 1:
9671 return DW_FORM_data1;
9672 case 2:
9673 return DW_FORM_data2;
9674 case 4:
9675 return DW_FORM_data4;
9676 case 8:
9677 return DW_FORM_data8;
9678 default:
9679 gcc_unreachable ();
9680 }
9681 case dw_val_class_loc_list:
9682 case dw_val_class_view_list:
9683 if (dwarf_split_debug_info
9684 && dwarf_version >= 5
9685 && AT_loc_list (a)->num_assigned)
9686 return DW_FORM_loclistx;
9687 /* FALLTHRU */
9688 case dw_val_class_range_list:
9689 /* For range lists in DWARF 5, use DW_FORM_rnglistx in .debug_info.dwo
9690 but DW_FORM_sec_offset in .debug_info, which is shorter if we
9691 care about the sizes of .debug* sections in shared libraries and
9692 executables and don't take into account relocations that affect just
9693 relocatable objects - for DW_FORM_rnglistx we'd have to emit an offset
9694 table in the .debug_rnglists section. */
9695 if (dwarf_split_debug_info
9696 && dwarf_version >= 5
9697 && AT_class (a) == dw_val_class_range_list
9698 && rnglist_idx
9699 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9700 return DW_FORM_rnglistx;
9701 if (dwarf_version >= 4)
9702 return DW_FORM_sec_offset;
9703 /* FALLTHRU */
9704 case dw_val_class_vms_delta:
9705 case dw_val_class_offset:
9706 switch (DWARF_OFFSET_SIZE)
9707 {
9708 case 4:
9709 return DW_FORM_data4;
9710 case 8:
9711 return DW_FORM_data8;
9712 default:
9713 gcc_unreachable ();
9714 }
9715 case dw_val_class_loc:
9716 if (dwarf_version >= 4)
9717 return DW_FORM_exprloc;
9718 switch (constant_size (size_of_locs (AT_loc (a))))
9719 {
9720 case 1:
9721 return DW_FORM_block1;
9722 case 2:
9723 return DW_FORM_block2;
9724 case 4:
9725 return DW_FORM_block4;
9726 default:
9727 gcc_unreachable ();
9728 }
9729 case dw_val_class_const:
9730 return DW_FORM_sdata;
9731 case dw_val_class_unsigned_const:
9732 switch (constant_size (AT_unsigned (a)))
9733 {
9734 case 1:
9735 return DW_FORM_data1;
9736 case 2:
9737 return DW_FORM_data2;
9738 case 4:
9739 /* In DWARF3 DW_AT_data_member_location with
9740 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9741 constant, so we need to use DW_FORM_udata if we need
9742 a large constant. */
9743 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9744 return DW_FORM_udata;
9745 return DW_FORM_data4;
9746 case 8:
9747 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9748 return DW_FORM_udata;
9749 return DW_FORM_data8;
9750 default:
9751 gcc_unreachable ();
9752 }
9753 case dw_val_class_const_implicit:
9754 case dw_val_class_unsigned_const_implicit:
9755 case dw_val_class_file_implicit:
9756 return DW_FORM_implicit_const;
9757 case dw_val_class_const_double:
9758 switch (HOST_BITS_PER_WIDE_INT)
9759 {
9760 case 8:
9761 return DW_FORM_data2;
9762 case 16:
9763 return DW_FORM_data4;
9764 case 32:
9765 return DW_FORM_data8;
9766 case 64:
9767 if (dwarf_version >= 5)
9768 return DW_FORM_data16;
9769 /* FALLTHRU */
9770 default:
9771 return DW_FORM_block1;
9772 }
9773 case dw_val_class_wide_int:
9774 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9775 {
9776 case 8:
9777 return DW_FORM_data1;
9778 case 16:
9779 return DW_FORM_data2;
9780 case 32:
9781 return DW_FORM_data4;
9782 case 64:
9783 return DW_FORM_data8;
9784 case 128:
9785 if (dwarf_version >= 5)
9786 return DW_FORM_data16;
9787 /* FALLTHRU */
9788 default:
9789 return DW_FORM_block1;
9790 }
9791 case dw_val_class_symview:
9792 /* ??? We might use uleb128, but then we'd have to compute
9793 .debug_info offsets in the assembler. */
9794 if (symview_upper_bound <= 0xff)
9795 return DW_FORM_data1;
9796 else if (symview_upper_bound <= 0xffff)
9797 return DW_FORM_data2;
9798 else if (symview_upper_bound <= 0xffffffff)
9799 return DW_FORM_data4;
9800 else
9801 return DW_FORM_data8;
9802 case dw_val_class_vec:
9803 switch (constant_size (a->dw_attr_val.v.val_vec.length
9804 * a->dw_attr_val.v.val_vec.elt_size))
9805 {
9806 case 1:
9807 return DW_FORM_block1;
9808 case 2:
9809 return DW_FORM_block2;
9810 case 4:
9811 return DW_FORM_block4;
9812 default:
9813 gcc_unreachable ();
9814 }
9815 case dw_val_class_flag:
9816 if (dwarf_version >= 4)
9817 {
9818 /* Currently all add_AT_flag calls pass in 1 as last argument,
9819 so DW_FORM_flag_present can be used. If that ever changes,
9820 we'll need to use DW_FORM_flag and have some optimization
9821 in build_abbrev_table that will change those to
9822 DW_FORM_flag_present if it is set to 1 in all DIEs using
9823 the same abbrev entry. */
9824 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9825 return DW_FORM_flag_present;
9826 }
9827 return DW_FORM_flag;
9828 case dw_val_class_die_ref:
9829 if (AT_ref_external (a))
9830 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9831 else
9832 return DW_FORM_ref;
9833 case dw_val_class_fde_ref:
9834 return DW_FORM_data;
9835 case dw_val_class_lbl_id:
9836 return (AT_index (a) == NOT_INDEXED
9837 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
9838 case dw_val_class_lineptr:
9839 case dw_val_class_macptr:
9840 case dw_val_class_loclistsptr:
9841 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9842 case dw_val_class_str:
9843 return AT_string_form (a);
9844 case dw_val_class_file:
9845 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9846 {
9847 case 1:
9848 return DW_FORM_data1;
9849 case 2:
9850 return DW_FORM_data2;
9851 case 4:
9852 return DW_FORM_data4;
9853 default:
9854 gcc_unreachable ();
9855 }
9856
9857 case dw_val_class_data8:
9858 return DW_FORM_data8;
9859
9860 case dw_val_class_high_pc:
9861 switch (DWARF2_ADDR_SIZE)
9862 {
9863 case 1:
9864 return DW_FORM_data1;
9865 case 2:
9866 return DW_FORM_data2;
9867 case 4:
9868 return DW_FORM_data4;
9869 case 8:
9870 return DW_FORM_data8;
9871 default:
9872 gcc_unreachable ();
9873 }
9874
9875 case dw_val_class_discr_value:
9876 return (a->dw_attr_val.v.val_discr_value.pos
9877 ? DW_FORM_udata
9878 : DW_FORM_sdata);
9879 case dw_val_class_discr_list:
9880 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9881 {
9882 case 1:
9883 return DW_FORM_block1;
9884 case 2:
9885 return DW_FORM_block2;
9886 case 4:
9887 return DW_FORM_block4;
9888 default:
9889 gcc_unreachable ();
9890 }
9891
9892 default:
9893 gcc_unreachable ();
9894 }
9895 }
9896
9897 /* Output the encoding of an attribute value. */
9898
9899 static void
9900 output_value_format (dw_attr_node *a)
9901 {
9902 enum dwarf_form form = value_format (a);
9903
9904 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9905 }
9906
9907 /* Given a die and id, produce the appropriate abbreviations. */
9908
9909 static void
9910 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9911 {
9912 unsigned ix;
9913 dw_attr_node *a_attr;
9914
9915 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9916 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9917 dwarf_tag_name (abbrev->die_tag));
9918
9919 if (abbrev->die_child != NULL)
9920 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9921 else
9922 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9923
9924 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9925 {
9926 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9927 dwarf_attr_name (a_attr->dw_attr));
9928 output_value_format (a_attr);
9929 if (value_format (a_attr) == DW_FORM_implicit_const)
9930 {
9931 if (AT_class (a_attr) == dw_val_class_file_implicit)
9932 {
9933 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9934 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9935 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9936 }
9937 else
9938 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9939 }
9940 }
9941
9942 dw2_asm_output_data (1, 0, NULL);
9943 dw2_asm_output_data (1, 0, NULL);
9944 }
9945
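/* Schematically, one entry produced by output_die_abbrevs looks roughly
   like this in the assembly output (the values are just an example):

	.uleb128 0x2	(abbrev code)
	.uleb128 0x2e	(TAG: DW_TAG_subprogram)
	.byte	0x1	DW_children_yes
	.uleb128 0x3	(DW_AT_name)
	.uleb128 0xe	(DW_FORM_strp)
	...
	.byte	0
	.byte	0

   The two trailing zero bytes end the attribute specifications of the
   entry; the table as a whole is then ended by the extra zero byte written
   in output_abbrev_section below.  */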
9946
9947 /* Output the .debug_abbrev section which defines the DIE abbreviation
9948 table. */
9949
9950 static void
9951 output_abbrev_section (void)
9952 {
9953 unsigned int abbrev_id;
9954 dw_die_ref abbrev;
9955
9956 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9957 if (abbrev_id != 0)
9958 output_die_abbrevs (abbrev_id, abbrev);
9959
9960 /* Terminate the table. */
9961 dw2_asm_output_data (1, 0, NULL);
9962 }
9963
9964 /* Return a new location list, given the begin and end range, and the
9965 expression. */
9966
9967 static inline dw_loc_list_ref
9968 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
9969 const char *end, var_loc_view vend,
9970 const char *section)
9971 {
9972 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
9973
9974 retlist->begin = begin;
9975 retlist->begin_entry = NULL;
9976 retlist->end = end;
9977 retlist->expr = expr;
9978 retlist->section = section;
9979 retlist->vbegin = vbegin;
9980 retlist->vend = vend;
9981
9982 return retlist;
9983 }
9984
9985 /* Return true iff there's any nonzero view number in the loc list. */
9986
9987 static bool
9988 loc_list_has_views (dw_loc_list_ref list)
9989 {
9990 if (!debug_variable_location_views)
9991 return false;
9992
9993 for (dw_loc_list_ref loc = list;
9994 loc != NULL; loc = loc->dw_loc_next)
9995 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
9996 return true;
9997
9998 return false;
9999 }
10000
10001 /* Generate a new internal symbol for this location list node, if it
10002 hasn't got one yet. */
10003
10004 static inline void
10005 gen_llsym (dw_loc_list_ref list)
10006 {
10007 gcc_assert (!list->ll_symbol);
10008 list->ll_symbol = gen_internal_sym ("LLST");
10009
10010 if (!loc_list_has_views (list))
10011 return;
10012
10013 if (dwarf2out_locviews_in_attribute ())
10014 {
10015 /* Use the same label_num for the view list. */
10016 label_num--;
10017 list->vl_symbol = gen_internal_sym ("LVUS");
10018 }
10019 else
10020 list->vl_symbol = list->ll_symbol;
10021 }
10022
10023 /* Generate a symbol for the list, but only if we really want to emit
10024 it as a list. */
10025
10026 static inline void
10027 maybe_gen_llsym (dw_loc_list_ref list)
10028 {
10029 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10030 return;
10031
10032 gen_llsym (list);
10033 }
10034
10035 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10036 NULL, don't consider size of the location expression. If we're not
10037 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10038 representation in *SIZEP. */
10039
10040 static bool
10041 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10042 {
10043 /* Don't output an entry that starts and ends at the same address. */
10044 if (strcmp (curr->begin, curr->end) == 0
10045 && curr->vbegin == curr->vend && !curr->force)
10046 return true;
10047
10048 if (!sizep)
10049 return false;
10050
10051 unsigned long size = size_of_locs (curr->expr);
10052
10053 /* If the expression is too large, drop it on the floor. We could
10054 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10055 in the expression, but expressions of 64KB or more for a single value
10056 in a single range are unlikely to be very useful. */
10057 if (dwarf_version < 5 && size > 0xffff)
10058 return true;
10059
10060 *sizep = size;
10061
10062 return false;
10063 }
10064
10065 /* Output a view pair loclist entry for CURR, if it requires one. */
10066
10067 static void
10068 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10069 {
10070 if (!dwarf2out_locviews_in_loclist ())
10071 return;
10072
10073 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10074 return;
10075
10076 #ifdef DW_LLE_view_pair
10077 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10078
10079 if (dwarf2out_as_locview_support)
10080 {
10081 if (ZERO_VIEW_P (curr->vbegin))
10082 dw2_asm_output_data_uleb128 (0, "Location view begin");
10083 else
10084 {
10085 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10086 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10087 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10088 }
10089
10090 if (ZERO_VIEW_P (curr->vend))
10091 dw2_asm_output_data_uleb128 (0, "Location view end");
10092 else
10093 {
10094 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10095 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10096 dw2_asm_output_symname_uleb128 (label, "Location view end");
10097 }
10098 }
10099 else
10100 {
10101 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10102 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10103 }
10104 #endif /* DW_LLE_view_pair */
10105
10106 return;
10107 }
10108
10109 /* Output the location list given to us. */
10110
10111 static void
10112 output_loc_list (dw_loc_list_ref list_head)
10113 {
10114 int vcount = 0, lcount = 0;
10115
10116 if (list_head->emitted)
10117 return;
10118 list_head->emitted = true;
10119
10120 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10121 {
10122 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10123
10124 for (dw_loc_list_ref curr = list_head; curr != NULL;
10125 curr = curr->dw_loc_next)
10126 {
10127 unsigned long size;
10128
10129 if (skip_loc_list_entry (curr, &size))
10130 continue;
10131
10132 vcount++;
10133
10134 /* ?? dwarf_split_debug_info? */
10135 if (dwarf2out_as_locview_support)
10136 {
10137 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10138
10139 if (!ZERO_VIEW_P (curr->vbegin))
10140 {
10141 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10142 dw2_asm_output_symname_uleb128 (label,
10143 "View list begin (%s)",
10144 list_head->vl_symbol);
10145 }
10146 else
10147 dw2_asm_output_data_uleb128 (0,
10148 "View list begin (%s)",
10149 list_head->vl_symbol);
10150
10151 if (!ZERO_VIEW_P (curr->vend))
10152 {
10153 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10154 dw2_asm_output_symname_uleb128 (label,
10155 "View list end (%s)",
10156 list_head->vl_symbol);
10157 }
10158 else
10159 dw2_asm_output_data_uleb128 (0,
10160 "View list end (%s)",
10161 list_head->vl_symbol);
10162 }
10163 else
10164 {
10165 dw2_asm_output_data_uleb128 (curr->vbegin,
10166 "View list begin (%s)",
10167 list_head->vl_symbol);
10168 dw2_asm_output_data_uleb128 (curr->vend,
10169 "View list end (%s)",
10170 list_head->vl_symbol);
10171 }
10172 }
10173 }
10174
10175 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10176
10177 const char *last_section = NULL;
10178 const char *base_label = NULL;
10179
10180 /* Walk the location list, and output each range + expression. */
10181 for (dw_loc_list_ref curr = list_head; curr != NULL;
10182 curr = curr->dw_loc_next)
10183 {
10184 unsigned long size;
10185
10186 /* Skip this entry? If we skip it here, we must skip it in the
10187 view list above as well. */
10188 if (skip_loc_list_entry (curr, &size))
10189 continue;
10190
10191 lcount++;
10192
10193 if (dwarf_version >= 5)
10194 {
10195 if (dwarf_split_debug_info)
10196 {
10197 dwarf2out_maybe_output_loclist_view_pair (curr);
10198 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has a
10199 uleb128 index into .debug_addr and a uleb128 length. */
10200 dw2_asm_output_data (1, DW_LLE_startx_length,
10201 "DW_LLE_startx_length (%s)",
10202 list_head->ll_symbol);
10203 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10204 "Location list range start index "
10205 "(%s)", curr->begin);
10206 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10207 For that case we probably need to emit DW_LLE_startx_endx,
10208 but we'd need 2 .debug_addr entries rather than just one. */
10209 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10210 "Location list length (%s)",
10211 list_head->ll_symbol);
10212 }
10213 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10214 {
10215 dwarf2out_maybe_output_loclist_view_pair (curr);
10216 /* If all code is in .text section, the base address is
10217 already provided by the CU attributes. Use
10218 DW_LLE_offset_pair where both addresses are uleb128 encoded
10219 offsets against that base. */
10220 dw2_asm_output_data (1, DW_LLE_offset_pair,
10221 "DW_LLE_offset_pair (%s)",
10222 list_head->ll_symbol);
10223 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10224 "Location list begin address (%s)",
10225 list_head->ll_symbol);
10226 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10227 "Location list end address (%s)",
10228 list_head->ll_symbol);
10229 }
10230 else if (HAVE_AS_LEB128)
10231 {
10232 /* Otherwise, find out how many consecutive entries could share
10233 the same base entry. If just one, emit DW_LLE_start_length,
10234 otherwise emit DW_LLE_base_address for the base address
10235 followed by a series of DW_LLE_offset_pair. */
10236 if (last_section == NULL || curr->section != last_section)
10237 {
10238 dw_loc_list_ref curr2;
10239 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10240 curr2 = curr2->dw_loc_next)
10241 {
10242 if (strcmp (curr2->begin, curr2->end) == 0
10243 && !curr2->force)
10244 continue;
10245 break;
10246 }
10247 if (curr2 == NULL || curr->section != curr2->section)
10248 last_section = NULL;
10249 else
10250 {
10251 last_section = curr->section;
10252 base_label = curr->begin;
10253 dw2_asm_output_data (1, DW_LLE_base_address,
10254 "DW_LLE_base_address (%s)",
10255 list_head->ll_symbol);
10256 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10257 "Base address (%s)",
10258 list_head->ll_symbol);
10259 }
10260 }
10261 /* Only one entry with the same base address. Use
10262 DW_LLE_start_length with absolute address and uleb128
10263 length. */
10264 if (last_section == NULL)
10265 {
10266 dwarf2out_maybe_output_loclist_view_pair (curr);
10267 dw2_asm_output_data (1, DW_LLE_start_length,
10268 "DW_LLE_start_length (%s)",
10269 list_head->ll_symbol);
10270 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10271 "Location list begin address (%s)",
10272 list_head->ll_symbol);
10273 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10274 "Location list length "
10275 "(%s)", list_head->ll_symbol);
10276 }
10277 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10278 DW_LLE_base_address. */
10279 else
10280 {
10281 dwarf2out_maybe_output_loclist_view_pair (curr);
10282 dw2_asm_output_data (1, DW_LLE_offset_pair,
10283 "DW_LLE_offset_pair (%s)",
10284 list_head->ll_symbol);
10285 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10286 "Location list begin address "
10287 "(%s)", list_head->ll_symbol);
10288 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10289 "Location list end address "
10290 "(%s)", list_head->ll_symbol);
10291 }
10292 }
10293 /* The assembler does not support the .uleb128 directive. Emit
10294 DW_LLE_start_end with a pair of absolute addresses. */
10295 else
10296 {
10297 dwarf2out_maybe_output_loclist_view_pair (curr);
10298 dw2_asm_output_data (1, DW_LLE_start_end,
10299 "DW_LLE_start_end (%s)",
10300 list_head->ll_symbol);
10301 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10302 "Location list begin address (%s)",
10303 list_head->ll_symbol);
10304 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10305 "Location list end address (%s)",
10306 list_head->ll_symbol);
10307 }
10308 }
10309 else if (dwarf_split_debug_info)
10310 {
10311 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr
10312 and 4 byte length. */
10313 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10314 "Location list start/length entry (%s)",
10315 list_head->ll_symbol);
10316 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10317 "Location list range start index (%s)",
10318 curr->begin);
10319 /* The length field is 4 bytes. If we ever need to support
10320 an 8-byte length, we can add a new DW_LLE code or fall back
10321 to DW_LLE_GNU_start_end_entry. */
10322 dw2_asm_output_delta (4, curr->end, curr->begin,
10323 "Location list range length (%s)",
10324 list_head->ll_symbol);
10325 }
10326 else if (!have_multiple_function_sections)
10327 {
10328 /* Pair of relative addresses against start of text section. */
10329 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10330 "Location list begin address (%s)",
10331 list_head->ll_symbol);
10332 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10333 "Location list end address (%s)",
10334 list_head->ll_symbol);
10335 }
10336 else
10337 {
10338 /* Pair of absolute addresses. */
10339 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10340 "Location list begin address (%s)",
10341 list_head->ll_symbol);
10342 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10343 "Location list end address (%s)",
10344 list_head->ll_symbol);
10345 }
10346
10347 /* Output the block length for this list of location operations. */
10348 if (dwarf_version >= 5)
10349 dw2_asm_output_data_uleb128 (size, "Location expression size");
10350 else
10351 {
10352 gcc_assert (size <= 0xffff);
10353 dw2_asm_output_data (2, size, "Location expression size");
10354 }
10355
10356 output_loc_sequence (curr->expr, -1);
10357 }
10358
10359 /* And finally list termination. */
10360 if (dwarf_version >= 5)
10361 dw2_asm_output_data (1, DW_LLE_end_of_list,
10362 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10363 else if (dwarf_split_debug_info)
10364 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10365 "Location list terminator (%s)",
10366 list_head->ll_symbol);
10367 else
10368 {
10369 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10370 "Location list terminator begin (%s)",
10371 list_head->ll_symbol);
10372 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10373 "Location list terminator end (%s)",
10374 list_head->ll_symbol);
10375 }
10376
10377 gcc_assert (!list_head->vl_symbol
10378 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10379 }
10380
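/* For reference, a single DWARF 5 entry emitted by the
   !have_multiple_function_sections && HAVE_AS_LEB128 branch above looks
   roughly like this (the label names are only illustrative):

	.byte	0x4			DW_LLE_offset_pair
	.uleb128 .LVL1-.Ltext0		begin offset
	.uleb128 .LVL2-.Ltext0		end offset
	.uleb128 0x1			location expression size
	.byte	0x50			DW_OP_reg0

   with the single DW_LLE_end_of_list byte emitted once after the last such
   entry of the list.  */
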
10381 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10382 section. Emit a relocated reference if val_entry is NULL, otherwise,
10383 emit an indirect reference. */
10384
10385 static void
10386 output_range_list_offset (dw_attr_node *a)
10387 {
10388 const char *name = dwarf_attr_name (a->dw_attr);
10389
10390 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10391 {
10392 if (dwarf_version >= 5)
10393 {
10394 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10395 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10396 debug_ranges_section, "%s", name);
10397 }
10398 else
10399 {
10400 char *p = strchr (ranges_section_label, '\0');
10401 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10402 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10403 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10404 debug_ranges_section, "%s", name);
10405 *p = '\0';
10406 }
10407 }
10408 else if (dwarf_version >= 5)
10409 {
10410 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10411 gcc_assert (rnglist_idx);
10412 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10413 }
10414 else
10415 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10416 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10417 "%s (offset from %s)", name, ranges_section_label);
10418 }
10419
10420 /* Output the offset into the debug_loc section. */
10421
10422 static void
10423 output_loc_list_offset (dw_attr_node *a)
10424 {
10425 char *sym = AT_loc_list (a)->ll_symbol;
10426
10427 gcc_assert (sym);
10428 if (!dwarf_split_debug_info)
10429 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10430 "%s", dwarf_attr_name (a->dw_attr));
10431 else if (dwarf_version >= 5)
10432 {
10433 gcc_assert (AT_loc_list (a)->num_assigned);
10434 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10435 dwarf_attr_name (a->dw_attr),
10436 sym);
10437 }
10438 else
10439 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10440 "%s", dwarf_attr_name (a->dw_attr));
10441 }
10442
10443 /* Output the view list offset into the debug_loc section. */
10444
10445 static void
10446 output_view_list_offset (dw_attr_node *a)
10447 {
10448 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10449
10450 gcc_assert (sym);
10451 if (dwarf_split_debug_info)
10452 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10453 "%s", dwarf_attr_name (a->dw_attr));
10454 else
10455 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10456 "%s", dwarf_attr_name (a->dw_attr));
10457 }
10458
10459 /* Output an attribute's index or value appropriately. */
10460
10461 static void
10462 output_attr_index_or_value (dw_attr_node *a)
10463 {
10464 const char *name = dwarf_attr_name (a->dw_attr);
10465
10466 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10467 {
10468 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10469 return;
10470 }
10471 switch (AT_class (a))
10472 {
10473 case dw_val_class_addr:
10474 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10475 break;
10476 case dw_val_class_high_pc:
10477 case dw_val_class_lbl_id:
10478 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10479 break;
10480 default:
10481 gcc_unreachable ();
10482 }
10483 }
10484
10485 /* Output a type signature. */
10486
10487 static inline void
10488 output_signature (const char *sig, const char *name)
10489 {
10490 int i;
10491
10492 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10493 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10494 }
10495
10496 /* Output a discriminant value. */
10497
10498 static inline void
10499 output_discr_value (dw_discr_value *discr_value, const char *name)
10500 {
10501 if (discr_value->pos)
10502 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10503 else
10504 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10505 }
10506
10507 /* Output the DIE and its attributes. Called recursively to generate
10508 the definitions of each child DIE. */
10509
10510 static void
10511 output_die (dw_die_ref die)
10512 {
10513 dw_attr_node *a;
10514 dw_die_ref c;
10515 unsigned long size;
10516 unsigned ix;
10517
10518 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10519 (unsigned long)die->die_offset,
10520 dwarf_tag_name (die->die_tag));
10521
10522 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10523 {
10524 const char *name = dwarf_attr_name (a->dw_attr);
10525
10526 switch (AT_class (a))
10527 {
10528 case dw_val_class_addr:
10529 output_attr_index_or_value (a);
10530 break;
10531
10532 case dw_val_class_offset:
10533 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10534 "%s", name);
10535 break;
10536
10537 case dw_val_class_range_list:
10538 output_range_list_offset (a);
10539 break;
10540
10541 case dw_val_class_loc:
10542 size = size_of_locs (AT_loc (a));
10543
10544 /* Output the block length for this list of location operations. */
10545 if (dwarf_version >= 4)
10546 dw2_asm_output_data_uleb128 (size, "%s", name);
10547 else
10548 dw2_asm_output_data (constant_size (size), size, "%s", name);
10549
10550 output_loc_sequence (AT_loc (a), -1);
10551 break;
10552
10553 case dw_val_class_const:
10554 /* ??? It would be slightly more efficient to use a scheme like the one
10555 used for unsigned constants below, but gdb 4.x does not sign
10556 extend. Gdb 5.x does sign extend. */
10557 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10558 break;
10559
10560 case dw_val_class_unsigned_const:
10561 {
10562 int csize = constant_size (AT_unsigned (a));
10563 if (dwarf_version == 3
10564 && a->dw_attr == DW_AT_data_member_location
10565 && csize >= 4)
10566 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10567 else
10568 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10569 }
10570 break;
10571
10572 case dw_val_class_symview:
10573 {
10574 int vsize;
10575 if (symview_upper_bound <= 0xff)
10576 vsize = 1;
10577 else if (symview_upper_bound <= 0xffff)
10578 vsize = 2;
10579 else if (symview_upper_bound <= 0xffffffff)
10580 vsize = 4;
10581 else
10582 vsize = 8;
10583 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10584 "%s", name);
10585 }
10586 break;
10587
10588 case dw_val_class_const_implicit:
10589 if (flag_debug_asm)
10590 fprintf (asm_out_file, "\t\t\t%s %s ("
10591 HOST_WIDE_INT_PRINT_DEC ")\n",
10592 ASM_COMMENT_START, name, AT_int (a));
10593 break;
10594
10595 case dw_val_class_unsigned_const_implicit:
10596 if (flag_debug_asm)
10597 fprintf (asm_out_file, "\t\t\t%s %s ("
10598 HOST_WIDE_INT_PRINT_HEX ")\n",
10599 ASM_COMMENT_START, name, AT_unsigned (a));
10600 break;
10601
10602 case dw_val_class_const_double:
10603 {
10604 unsigned HOST_WIDE_INT first, second;
10605
10606 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10607 dw2_asm_output_data (1,
10608 HOST_BITS_PER_DOUBLE_INT
10609 / HOST_BITS_PER_CHAR,
10610 NULL);
10611
10612 if (WORDS_BIG_ENDIAN)
10613 {
10614 first = a->dw_attr_val.v.val_double.high;
10615 second = a->dw_attr_val.v.val_double.low;
10616 }
10617 else
10618 {
10619 first = a->dw_attr_val.v.val_double.low;
10620 second = a->dw_attr_val.v.val_double.high;
10621 }
10622
10623 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10624 first, "%s", name);
10625 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10626 second, NULL);
10627 }
10628 break;
10629
10630 case dw_val_class_wide_int:
10631 {
10632 int i;
10633 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10634 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10635 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10636 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10637 * l, NULL);
10638
10639 if (WORDS_BIG_ENDIAN)
10640 for (i = len - 1; i >= 0; --i)
10641 {
10642 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10643 "%s", name);
10644 name = "";
10645 }
10646 else
10647 for (i = 0; i < len; ++i)
10648 {
10649 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10650 "%s", name);
10651 name = "";
10652 }
10653 }
10654 break;
10655
10656 case dw_val_class_vec:
10657 {
10658 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10659 unsigned int len = a->dw_attr_val.v.val_vec.length;
10660 unsigned int i;
10661 unsigned char *p;
10662
10663 dw2_asm_output_data (constant_size (len * elt_size),
10664 len * elt_size, "%s", name);
10665 if (elt_size > sizeof (HOST_WIDE_INT))
10666 {
10667 elt_size /= 2;
10668 len *= 2;
10669 }
10670 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10671 i < len;
10672 i++, p += elt_size)
10673 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10674 "fp or vector constant word %u", i);
10675 break;
10676 }
10677
10678 case dw_val_class_flag:
10679 if (dwarf_version >= 4)
10680 {
10681 /* Currently all add_AT_flag calls pass in 1 as last argument,
10682 so DW_FORM_flag_present can be used. If that ever changes,
10683 we'll need to use DW_FORM_flag and have some optimization
10684 in build_abbrev_table that will change those to
10685 DW_FORM_flag_present if it is set to 1 in all DIEs using
10686 the same abbrev entry. */
10687 gcc_assert (AT_flag (a) == 1);
10688 if (flag_debug_asm)
10689 fprintf (asm_out_file, "\t\t\t%s %s\n",
10690 ASM_COMMENT_START, name);
10691 break;
10692 }
10693 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10694 break;
10695
10696 case dw_val_class_loc_list:
10697 output_loc_list_offset (a);
10698 break;
10699
10700 case dw_val_class_view_list:
10701 output_view_list_offset (a);
10702 break;
10703
10704 case dw_val_class_die_ref:
10705 if (AT_ref_external (a))
10706 {
10707 if (AT_ref (a)->comdat_type_p)
10708 {
10709 comdat_type_node *type_node
10710 = AT_ref (a)->die_id.die_type_node;
10711
10712 gcc_assert (type_node);
10713 output_signature (type_node->signature, name);
10714 }
10715 else
10716 {
10717 const char *sym = AT_ref (a)->die_id.die_symbol;
10718 int size;
10719
10720 gcc_assert (sym);
10721 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10722 length, whereas in DWARF3 it's always sized as an
10723 offset. */
10724 if (dwarf_version == 2)
10725 size = DWARF2_ADDR_SIZE;
10726 else
10727 size = DWARF_OFFSET_SIZE;
10728 /* ??? We cannot unconditionally output die_offset if
10729 non-zero - others might create references to those
10730 DIEs via symbols.
10731 And we do not clear its DIE offset after outputting it
10732 (and the label refers to the actual DIE, not to the
10733 DWARF CU unit header; only if it referred to the header
10734 would label + offset be the correct thing to emit).
10735 ??? This is the reason for the with_offset flag. */
10736 if (AT_ref (a)->with_offset)
10737 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10738 debug_info_section, "%s", name);
10739 else
10740 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10741 name);
10742 }
10743 }
10744 else
10745 {
10746 gcc_assert (AT_ref (a)->die_offset);
10747 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10748 "%s", name);
10749 }
10750 break;
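/* Illustrative sizes for the external reference emitted above, assuming
   a 64-bit target and 32-bit DWARF (DWARF_OFFSET_SIZE == 4): a DWARF 2
   DW_FORM_ref_addr reference occupies DWARF2_ADDR_SIZE == 8 bytes, while
   for DWARF 3 and later it occupies DWARF_OFFSET_SIZE == 4 bytes.
   Internal references always use DWARF_OFFSET_SIZE.  */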
10751
10752 case dw_val_class_fde_ref:
10753 {
10754 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10755
10756 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10757 a->dw_attr_val.v.val_fde_index * 2);
10758 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10759 "%s", name);
10760 }
10761 break;
10762
10763 case dw_val_class_vms_delta:
10764 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10765 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10766 AT_vms_delta2 (a), AT_vms_delta1 (a),
10767 "%s", name);
10768 #else
10769 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10770 AT_vms_delta2 (a), AT_vms_delta1 (a),
10771 "%s", name);
10772 #endif
10773 break;
10774
10775 case dw_val_class_lbl_id:
10776 output_attr_index_or_value (a);
10777 break;
10778
10779 case dw_val_class_lineptr:
10780 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10781 debug_line_section, "%s", name);
10782 break;
10783
10784 case dw_val_class_macptr:
10785 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10786 debug_macinfo_section, "%s", name);
10787 break;
10788
10789 case dw_val_class_loclistsptr:
10790 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10791 debug_loc_section, "%s", name);
10792 break;
10793
10794 case dw_val_class_str:
10795 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10796 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10797 a->dw_attr_val.v.val_str->label,
10798 debug_str_section,
10799 "%s: \"%s\"", name, AT_string (a));
10800 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10801 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10802 a->dw_attr_val.v.val_str->label,
10803 debug_line_str_section,
10804 "%s: \"%s\"", name, AT_string (a));
10805 else if (a->dw_attr_val.v.val_str->form == DW_FORM_GNU_str_index)
10806 dw2_asm_output_data_uleb128 (AT_index (a),
10807 "%s: \"%s\"", name, AT_string (a));
10808 else
10809 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10810 break;
10811
10812 case dw_val_class_file:
10813 {
10814 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10815
10816 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10817 a->dw_attr_val.v.val_file->filename);
10818 break;
10819 }
10820
10821 case dw_val_class_file_implicit:
10822 if (flag_debug_asm)
10823 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10824 ASM_COMMENT_START, name,
10825 maybe_emit_file (a->dw_attr_val.v.val_file),
10826 a->dw_attr_val.v.val_file->filename);
10827 break;
10828
10829 case dw_val_class_data8:
10830 {
10831 int i;
10832
10833 for (i = 0; i < 8; i++)
10834 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10835 i == 0 ? "%s" : NULL, name);
10836 break;
10837 }
10838
10839 case dw_val_class_high_pc:
10840 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10841 get_AT_low_pc (die), "DW_AT_high_pc");
10842 break;
10843
10844 case dw_val_class_discr_value:
10845 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10846 break;
10847
10848 case dw_val_class_discr_list:
10849 {
10850 dw_discr_list_ref list = AT_discr_list (a);
10851 const int size = size_of_discr_list (list);
10852
10853 /* This is a block, so output its length first. */
10854 dw2_asm_output_data (constant_size (size), size,
10855 "%s: block size", name);
10856
10857 for (; list != NULL; list = list->dw_discr_next)
10858 {
10859 /* One byte for the discriminant value descriptor, and then as
10860 many LEB128 numbers as required. */
10861 if (list->dw_discr_range)
10862 dw2_asm_output_data (1, DW_DSC_range,
10863 "%s: DW_DSC_range", name);
10864 else
10865 dw2_asm_output_data (1, DW_DSC_label,
10866 "%s: DW_DSC_label", name);
10867
10868 output_discr_value (&list->dw_discr_lower_bound, name);
10869 if (list->dw_discr_range)
10870 output_discr_value (&list->dw_discr_upper_bound, name);
10871 }
10872 break;
10873 }
10874
10875 default:
10876 gcc_unreachable ();
10877 }
10878 }
10879
10880 FOR_EACH_CHILD (die, c, output_die (c));
10881
10882 /* Add null byte to terminate sibling list. */
10883 if (die->die_child != NULL)
10884 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10885 (unsigned long) die->die_offset);
10886 }
10887
10888 /* Output the dwarf version number. */
10889
10890 static void
10891 output_dwarf_version ()
10892 {
10893 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10894 views in loclist. That will change eventually. */
10895 if (dwarf_version == 6)
10896 {
10897 static bool once;
10898 if (!once)
10899 {
10900 warning (0,
10901 "-gdwarf-6 is output as version 5 with incompatibilities");
10902 once = true;
10903 }
10904 dw2_asm_output_data (2, 5, "DWARF version number");
10905 }
10906 else
10907 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10908 }
10909
10910 /* Output the compilation unit header that appears at the beginning of the
10911 .debug_info section, and precedes the DIE descriptions. */
10912
10913 static void
10914 output_compilation_unit_header (enum dwarf_unit_type ut)
10915 {
10916 if (!XCOFF_DEBUGGING_INFO)
10917 {
10918 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10919 dw2_asm_output_data (4, 0xffffffff,
10920 "Initial length escape value indicating 64-bit DWARF extension");
10921 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10922 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10923 "Length of Compilation Unit Info");
10924 }
10925
10926 output_dwarf_version ();
10927 if (dwarf_version >= 5)
10928 {
10929 const char *name;
10930 switch (ut)
10931 {
10932 case DW_UT_compile: name = "DW_UT_compile"; break;
10933 case DW_UT_type: name = "DW_UT_type"; break;
10934 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10935 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10936 default: gcc_unreachable ();
10937 }
10938 dw2_asm_output_data (1, ut, "%s", name);
10939 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10940 }
10941 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
10942 debug_abbrev_section,
10943 "Offset Into Abbrev. Section");
10944 if (dwarf_version < 5)
10945 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10946 }
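/* Illustrative layouts of the header emitted above, assuming 32-bit
   DWARF (DWARF_OFFSET_SIZE == 4) and no XCOFF:
   DWARF 2-4: length (4 bytes), version (2), abbrev offset (4),
   address size (1).
   DWARF 5: length (4), version (2), unit type (1), address size (1),
   abbrev offset (4).
   I.e. DWARF 5 inserts a unit type byte and moves the address size in
   front of the abbreviation table offset.  */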
10947
10948 /* Output the compilation unit DIE and its children. */
10949
10950 static void
10951 output_comp_unit (dw_die_ref die, int output_if_empty,
10952 const unsigned char *dwo_id)
10953 {
10954 const char *secname, *oldsym;
10955 char *tmp;
10956
10957 /* Unless we are outputting the main CU, we may throw away empty ones. */
10958 if (!output_if_empty && die->die_child == NULL)
10959 return;
10960
10961 /* Even if there are no children of this DIE, we must output the information
10962 about the compilation unit. Otherwise, on an empty translation unit, we
10963 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
10964 will then complain when examining the file. First mark all the DIEs in
10965 this CU so we know which get local refs. */
10966 mark_dies (die);
10967
10968 external_ref_hash_type *extern_map = optimize_external_refs (die);
10969
10970 /* For now, optimize only the main CU, in order to optimize the rest
10971 we'd need to see all of them earlier. Leave the rest for post-linking
10972 tools like DWZ. */
10973 if (die == comp_unit_die ())
10974 abbrev_opt_start = vec_safe_length (abbrev_die_table);
10975
10976 build_abbrev_table (die, extern_map);
10977
10978 optimize_abbrev_table ();
10979
10980 delete extern_map;
10981
10982 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
10983 next_die_offset = (dwo_id
10984 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
10985 : DWARF_COMPILE_UNIT_HEADER_SIZE);
10986 calc_die_sizes (die);
10987
10988 oldsym = die->die_id.die_symbol;
10989 if (oldsym && die->comdat_type_p)
10990 {
10991 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
10992
10993 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
10994 secname = tmp;
10995 die->die_id.die_symbol = NULL;
10996 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
10997 }
10998 else
10999 {
11000 switch_to_section (debug_info_section);
11001 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11002 info_section_emitted = true;
11003 }
11004
11005 /* For LTO cross unit DIE refs we want a symbol on the start of the
11006 debuginfo section, not on the CU DIE. */
11007 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11008 {
11009 /* ??? No way to get visibility assembled without a decl. */
11010 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11011 get_identifier (oldsym), char_type_node);
11012 TREE_PUBLIC (decl) = true;
11013 TREE_STATIC (decl) = true;
11014 DECL_ARTIFICIAL (decl) = true;
11015 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11016 DECL_VISIBILITY_SPECIFIED (decl) = true;
11017 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11018 #ifdef ASM_WEAKEN_LABEL
11019 /* We prefer a .weak because that handles duplicates from duplicate
11020 archive members in a graceful way. */
11021 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11022 #else
11023 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11024 #endif
11025 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11026 }
11027
11028 /* Output debugging information. */
11029 output_compilation_unit_header (dwo_id
11030 ? DW_UT_split_compile : DW_UT_compile);
11031 if (dwarf_version >= 5)
11032 {
11033 if (dwo_id != NULL)
11034 for (int i = 0; i < 8; i++)
11035 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11036 }
11037 output_die (die);
11038
11039 /* Leave the marks on the main CU, so we can check them in
11040 output_pubnames. */
11041 if (oldsym)
11042 {
11043 unmark_dies (die);
11044 die->die_id.die_symbol = oldsym;
11045 }
11046 }
11047
11048 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11049 and .debug_pubtypes. This is configured per-target, but can be
11050 overridden by the -gpubnames or -gno-pubnames options. */
11051
11052 static inline bool
11053 want_pubnames (void)
11054 {
11055 if (debug_info_level <= DINFO_LEVEL_TERSE)
11056 return false;
11057 if (debug_generate_pub_sections != -1)
11058 return debug_generate_pub_sections;
11059 return targetm.want_debug_pub_sections;
11060 }
11061
11062 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11063
11064 static void
11065 add_AT_pubnames (dw_die_ref die)
11066 {
11067 if (want_pubnames ())
11068 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11069 }
11070
11071 /* Add a string attribute value to a skeleton DIE. */
11072
11073 static inline void
11074 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11075 const char *str)
11076 {
11077 dw_attr_node attr;
11078 struct indirect_string_node *node;
11079
11080 if (! skeleton_debug_str_hash)
11081 skeleton_debug_str_hash
11082 = hash_table<indirect_string_hasher>::create_ggc (10);
11083
11084 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11085 find_string_form (node);
11086 if (node->form == DW_FORM_GNU_str_index)
11087 node->form = DW_FORM_strp;
11088
11089 attr.dw_attr = attr_kind;
11090 attr.dw_attr_val.val_class = dw_val_class_str;
11091 attr.dw_attr_val.val_entry = NULL;
11092 attr.dw_attr_val.v.val_str = node;
11093 add_dwarf_attr (die, &attr);
11094 }
11095
11096 /* Helper function to generate top-level dies for skeleton debug_info and
11097 debug_types. */
11098
11099 static void
11100 add_top_level_skeleton_die_attrs (dw_die_ref die)
11101 {
11102 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11103 const char *comp_dir = comp_dir_string ();
11104
11105 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11106 if (comp_dir != NULL)
11107 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11108 add_AT_pubnames (die);
11109 add_AT_lineptr (die, DW_AT_GNU_addr_base, debug_addr_section_label);
11110 }
11111
11112 /* Output skeleton debug sections that point to the dwo file. */
11113
11114 static void
11115 output_skeleton_debug_sections (dw_die_ref comp_unit,
11116 const unsigned char *dwo_id)
11117 {
11118 /* These attributes will be found in the full debug_info section. */
11119 remove_AT (comp_unit, DW_AT_producer);
11120 remove_AT (comp_unit, DW_AT_language);
11121
11122 switch_to_section (debug_skeleton_info_section);
11123 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11124
11125 /* Produce the skeleton compilation-unit header. This one differs enough from
11126 a normal CU header that it's better not to call
11127 output_compilation_unit_header. */
11128 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11129 dw2_asm_output_data (4, 0xffffffff,
11130 "Initial length escape value indicating 64-bit "
11131 "DWARF extension");
11132
11133 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11134 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11135 - DWARF_INITIAL_LENGTH_SIZE
11136 + size_of_die (comp_unit),
11137 "Length of Compilation Unit Info");
11138 output_dwarf_version ();
11139 if (dwarf_version >= 5)
11140 {
11141 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11142 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11143 }
11144 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11145 debug_skeleton_abbrev_section,
11146 "Offset Into Abbrev. Section");
11147 if (dwarf_version < 5)
11148 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11149 else
11150 for (int i = 0; i < 8; i++)
11151 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11152
11153 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11154 output_die (comp_unit);
11155
11156 /* Build the skeleton debug_abbrev section. */
11157 switch_to_section (debug_skeleton_abbrev_section);
11158 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11159
11160 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11161
11162 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11163 }
11164
11165 /* Output a comdat type unit DIE and its children. */
11166
11167 static void
11168 output_comdat_type_unit (comdat_type_node *node)
11169 {
11170 const char *secname;
11171 char *tmp;
11172 int i;
11173 #if defined (OBJECT_FORMAT_ELF)
11174 tree comdat_key;
11175 #endif
11176
11177 /* First mark all the DIEs in this CU so we know which get local refs. */
11178 mark_dies (node->root_die);
11179
11180 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11181
11182 build_abbrev_table (node->root_die, extern_map);
11183
11184 delete extern_map;
11185 extern_map = NULL;
11186
11187 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11188 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11189 calc_die_sizes (node->root_die);
11190
11191 #if defined (OBJECT_FORMAT_ELF)
11192 if (dwarf_version >= 5)
11193 {
11194 if (!dwarf_split_debug_info)
11195 secname = ".debug_info";
11196 else
11197 secname = ".debug_info.dwo";
11198 }
11199 else if (!dwarf_split_debug_info)
11200 secname = ".debug_types";
11201 else
11202 secname = ".debug_types.dwo";
11203
11204 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11205 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11206 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11207 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11208 comdat_key = get_identifier (tmp);
11209 targetm.asm_out.named_section (secname,
11210 SECTION_DEBUG | SECTION_LINKONCE,
11211 comdat_key);
11212 #else
11213 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11214 sprintf (tmp, (dwarf_version >= 5
11215 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11216 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11217 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11218 secname = tmp;
11219 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11220 #endif
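/* Illustrative naming produced by the code above for a hypothetical type
   signature beginning 0x12 0x34: on ELF targets the unit goes into
   ".debug_info" / ".debug_types" (or their .dwo variants) with comdat
   key "wi.1234..." ("wt.1234..." before DWARF 5); on other targets it
   goes into a ".gnu.linkonce.wi.1234..." (or ...wt...) section instead.  */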
11221
11222 /* Output debugging information. */
11223 output_compilation_unit_header (dwarf_split_debug_info
11224 ? DW_UT_split_type : DW_UT_type);
11225 output_signature (node->signature, "Type Signature");
11226 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11227 "Offset to Type DIE");
11228 output_die (node->root_die);
11229
11230 unmark_dies (node->root_die);
11231 }
11232
11233 /* Return the DWARF2/3 pubname associated with a decl. */
11234
11235 static const char *
11236 dwarf2_name (tree decl, int scope)
11237 {
11238 if (DECL_NAMELESS (decl))
11239 return NULL;
11240 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11241 }
11242
11243 /* Add a new entry to .debug_pubnames if appropriate. */
11244
11245 static void
11246 add_pubname_string (const char *str, dw_die_ref die)
11247 {
11248 pubname_entry e;
11249
11250 e.die = die;
11251 e.name = xstrdup (str);
11252 vec_safe_push (pubname_table, e);
11253 }
11254
11255 static void
11256 add_pubname (tree decl, dw_die_ref die)
11257 {
11258 if (!want_pubnames ())
11259 return;
11260
11261 /* Don't add items to the table when we expect that the consumer will have
11262 just read the enclosing die. For example, if the consumer is looking at a
11263 class_member, it will either be inside the class already, or will have just
11264 looked up the class to find the member. Either way, searching the class is
11265 faster than searching the index. */
11266 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11267 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11268 {
11269 const char *name = dwarf2_name (decl, 1);
11270
11271 if (name)
11272 add_pubname_string (name, die);
11273 }
11274 }
11275
11276 /* Add an enumerator to the pubnames section. */
11277
11278 static void
11279 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11280 {
11281 pubname_entry e;
11282
11283 gcc_assert (scope_name);
11284 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11285 e.die = die;
11286 vec_safe_push (pubname_table, e);
11287 }
11288
11289 /* Add a new entry to .debug_pubtypes if appropriate. */
11290
11291 static void
11292 add_pubtype (tree decl, dw_die_ref die)
11293 {
11294 pubname_entry e;
11295
11296 if (!want_pubnames ())
11297 return;
11298
11299 if ((TREE_PUBLIC (decl)
11300 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11301 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11302 {
11303 tree scope = NULL;
11304 const char *scope_name = "";
11305 const char *sep = is_cxx () ? "::" : ".";
11306 const char *name;
11307
11308 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11309 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11310 {
11311 scope_name = lang_hooks.dwarf_name (scope, 1);
11312 if (scope_name != NULL && scope_name[0] != '\0')
11313 scope_name = concat (scope_name, sep, NULL);
11314 else
11315 scope_name = "";
11316 }
11317
11318 if (TYPE_P (decl))
11319 name = type_tag (decl);
11320 else
11321 name = lang_hooks.dwarf_name (decl, 1);
11322
11323 /* If we don't have a name for the type, there's no point in adding
11324 it to the table. */
11325 if (name != NULL && name[0] != '\0')
11326 {
11327 e.die = die;
11328 e.name = concat (scope_name, name, NULL);
11329 vec_safe_push (pubtype_table, e);
11330 }
11331
11332 /* Although it might be more consistent to add the pubinfo for the
11333 enumerators as their dies are created, they should only be added if the
11334 enum type meets the criteria above. So rather than re-check the parent
11335 enum type whenever an enumerator die is created, just output them all
11336 here. This isn't protected by the name conditional because anonymous
11337 enums don't have names. */
11338 if (die->die_tag == DW_TAG_enumeration_type)
11339 {
11340 dw_die_ref c;
11341
11342 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11343 }
11344 }
11345 }
11346
11347 /* Output a single entry in the pubnames table. */
11348
11349 static void
11350 output_pubname (dw_offset die_offset, pubname_entry *entry)
11351 {
11352 dw_die_ref die = entry->die;
11353 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11354
11355 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11356
11357 if (debug_generate_pub_sections == 2)
11358 {
11359 /* This logic follows gdb's method for determining the value of the flag
11360 byte. */
11361 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11362 switch (die->die_tag)
11363 {
11364 case DW_TAG_typedef:
11365 case DW_TAG_base_type:
11366 case DW_TAG_subrange_type:
11367 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11368 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11369 break;
11370 case DW_TAG_enumerator:
11371 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11372 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11373 if (!is_cxx ())
11374 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11375 break;
11376 case DW_TAG_subprogram:
11377 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11378 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11379 if (!is_ada ())
11380 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11381 break;
11382 case DW_TAG_constant:
11383 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11384 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11385 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11386 break;
11387 case DW_TAG_variable:
11388 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11389 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11390 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11391 break;
11392 case DW_TAG_namespace:
11393 case DW_TAG_imported_declaration:
11394 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11395 break;
11396 case DW_TAG_class_type:
11397 case DW_TAG_interface_type:
11398 case DW_TAG_structure_type:
11399 case DW_TAG_union_type:
11400 case DW_TAG_enumeration_type:
11401 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11402 if (!is_cxx ())
11403 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11404 break;
11405 default:
11406 /* An unusual tag. Leave the flag-byte empty. */
11407 break;
11408 }
11409 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11410 "GDB-index flags");
11411 }
11412
11413 dw2_asm_output_nstring (entry->name, -1, "external name");
11414 }
11415
11416
11417 /* Output the public names table used to speed up access to externally
11418 visible names; or the public types table used to find type definitions. */
11419
11420 static void
11421 output_pubnames (vec<pubname_entry, va_gc> *names)
11422 {
11423 unsigned i;
11424 unsigned long pubnames_length = size_of_pubnames (names);
11425 pubname_entry *pub;
11426
11427 if (!XCOFF_DEBUGGING_INFO)
11428 {
11429 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11430 dw2_asm_output_data (4, 0xffffffff,
11431 "Initial length escape value indicating 64-bit DWARF extension");
11432 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11433 "Pub Info Length");
11434 }
11435
11436 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11437 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11438
11439 if (dwarf_split_debug_info)
11440 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11441 debug_skeleton_info_section,
11442 "Offset of Compilation Unit Info");
11443 else
11444 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11445 debug_info_section,
11446 "Offset of Compilation Unit Info");
11447 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11448 "Compilation Unit Length");
11449
11450 FOR_EACH_VEC_ELT (*names, i, pub)
11451 {
11452 if (include_pubname_in_output (names, pub))
11453 {
11454 dw_offset die_offset = pub->die->die_offset;
11455
11456 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11457 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11458 gcc_assert (pub->die->die_mark);
11459
11460 /* If we're putting types in their own .debug_types sections,
11461 the .debug_pubtypes table will still point to the compile
11462 unit (not the type unit), so we want to use the offset of
11463 the skeleton DIE (if there is one). */
11464 if (pub->die->comdat_type_p && names == pubtype_table)
11465 {
11466 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11467
11468 if (type_node != NULL)
11469 die_offset = (type_node->skeleton_die != NULL
11470 ? type_node->skeleton_die->die_offset
11471 : comp_unit_die ()->die_offset);
11472 }
11473
11474 output_pubname (die_offset, pub);
11475 }
11476 }
11477
11478 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11479 }
11480
11481 /* Output public names and types tables if necessary. */
11482
11483 static void
11484 output_pubtables (void)
11485 {
11486 if (!want_pubnames () || !info_section_emitted)
11487 return;
11488
11489 switch_to_section (debug_pubnames_section);
11490 output_pubnames (pubname_table);
11491 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11492 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11493 simply won't look for the section. */
11494 switch_to_section (debug_pubtypes_section);
11495 output_pubnames (pubtype_table);
11496 }
11497
11498
11499 /* Output the information that goes into the .debug_aranges table.
11500 Namely, define the beginning and ending address range of the
11501 text section generated for this compilation unit. */
11502
11503 static void
11504 output_aranges (void)
11505 {
11506 unsigned i;
11507 unsigned long aranges_length = size_of_aranges ();
11508
11509 if (!XCOFF_DEBUGGING_INFO)
11510 {
11511 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11512 dw2_asm_output_data (4, 0xffffffff,
11513 "Initial length escape value indicating 64-bit DWARF extension");
11514 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11515 "Length of Address Ranges Info");
11516 }
11517
11518 /* Version number for aranges is still 2, even up to DWARF5. */
11519 dw2_asm_output_data (2, 2, "DWARF aranges version");
11520 if (dwarf_split_debug_info)
11521 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11522 debug_skeleton_info_section,
11523 "Offset of Compilation Unit Info");
11524 else
11525 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11526 debug_info_section,
11527 "Offset of Compilation Unit Info");
11528 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11529 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11530
11531 /* We need to align to twice the pointer size here. */
11532 if (DWARF_ARANGES_PAD_SIZE)
11533 {
11534 /* Pad using 2-byte words so that the padding is correct for any
11535 pointer size. */
11536 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11537 2 * DWARF2_ADDR_SIZE);
11538 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11539 dw2_asm_output_data (2, 0, NULL);
11540 }
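/* A worked example of the padding above, assuming 32-bit DWARF and
   DWARF2_ADDR_SIZE == 8: the header emitted so far is 4 (length)
   + 2 (version) + 4 (CU offset) + 1 + 1 = 12 bytes, and the address/length
   pairs must start at a multiple of 2 * 8 = 16 bytes from the start of
   the entry, so DWARF_ARANGES_PAD_SIZE is 4 and two 2-byte zero words
   are emitted.  */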
11541
11542 /* It is necessary not to output these entries if the sections were
11543 not used; in that case the length will be 0 and
11544 the address may end up as 0 if the section is discarded by ld
11545 --gc-sections, leaving an invalid (0, 0) entry that can be
11546 confused with the terminator. */
11547 if (text_section_used)
11548 {
11549 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11550 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11551 text_section_label, "Length");
11552 }
11553 if (cold_text_section_used)
11554 {
11555 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11556 "Address");
11557 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11558 cold_text_section_label, "Length");
11559 }
11560
11561 if (have_multiple_function_sections)
11562 {
11563 unsigned fde_idx;
11564 dw_fde_ref fde;
11565
11566 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11567 {
11568 if (DECL_IGNORED_P (fde->decl))
11569 continue;
11570 if (!fde->in_std_section)
11571 {
11572 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11573 "Address");
11574 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11575 fde->dw_fde_begin, "Length");
11576 }
11577 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11578 {
11579 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11580 "Address");
11581 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11582 fde->dw_fde_second_begin, "Length");
11583 }
11584 }
11585 }
11586
11587 /* Output the terminator words. */
11588 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11589 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11590 }
11591
11592 /* Add a new entry to .debug_ranges. Return its index into
11593 ranges_table vector. */
11594
11595 static unsigned int
11596 add_ranges_num (int num, bool maybe_new_sec)
11597 {
11598 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11599 vec_safe_push (ranges_table, r);
11600 return vec_safe_length (ranges_table) - 1;
11601 }
11602
11603 /* Add a new entry to .debug_ranges corresponding to a block, or a
11604 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11605 this entry might be in a different section from previous range. */
11606
11607 static unsigned int
11608 add_ranges (const_tree block, bool maybe_new_sec)
11609 {
11610 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11611 }
11612
11613 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11614 chain, or a middle entry of a chain that will be directly referred to. */
11615
11616 static void
11617 note_rnglist_head (unsigned int offset)
11618 {
11619 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11620 return;
11621 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11622 }
11623
11624 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11625 When using dwarf_split_debug_info, address attributes in dies destined
11626 for the final executable should be direct references--setting the
11627 parameter force_direct ensures this behavior. */
11628
11629 static void
11630 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11631 bool *added, bool force_direct)
11632 {
11633 unsigned int in_use = vec_safe_length (ranges_by_label);
11634 unsigned int offset;
11635 dw_ranges_by_label rbl = { begin, end };
11636 vec_safe_push (ranges_by_label, rbl);
11637 offset = add_ranges_num (-(int)in_use - 1, true);
11638 if (!*added)
11639 {
11640 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11641 *added = true;
11642 note_rnglist_head (offset);
11643 }
11644 }
11645
11646 /* Emit .debug_ranges section. */
11647
11648 static void
11649 output_ranges (void)
11650 {
11651 unsigned i;
11652 static const char *const start_fmt = "Offset %#x";
11653 const char *fmt = start_fmt;
11654 dw_ranges *r;
11655
11656 switch_to_section (debug_ranges_section);
11657 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11658 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11659 {
11660 int block_num = r->num;
11661
11662 if (block_num > 0)
11663 {
11664 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11665 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11666
11667 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11668 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11669
11670 /* If all code is in the text section, then the compilation
11671 unit base address defaults to DW_AT_low_pc, which is the
11672 base of the text section. */
11673 if (!have_multiple_function_sections)
11674 {
11675 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11676 text_section_label,
11677 fmt, i * 2 * DWARF2_ADDR_SIZE);
11678 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11679 text_section_label, NULL);
11680 }
11681
11682 /* Otherwise, the compilation unit base address is zero,
11683 which allows us to use absolute addresses, and not worry
11684 about whether the target supports cross-section
11685 arithmetic. */
11686 else
11687 {
11688 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11689 fmt, i * 2 * DWARF2_ADDR_SIZE);
11690 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11691 }
11692
11693 fmt = NULL;
11694 }
11695
11696 /* Negative block_num stands for an index into ranges_by_label. */
11697 else if (block_num < 0)
11698 {
11699 int lab_idx = - block_num - 1;
11700
11701 if (!have_multiple_function_sections)
11702 {
11703 gcc_unreachable ();
11704 #if 0
11705 /* If we ever use add_ranges_by_labels () for a single
11706 function section, all we have to do is to take out
11707 the #if 0 above. */
11708 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11709 (*ranges_by_label)[lab_idx].begin,
11710 text_section_label,
11711 fmt, i * 2 * DWARF2_ADDR_SIZE);
11712 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11713 (*ranges_by_label)[lab_idx].end,
11714 text_section_label, NULL);
11715 #endif
11716 }
11717 else
11718 {
11719 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11720 (*ranges_by_label)[lab_idx].begin,
11721 fmt, i * 2 * DWARF2_ADDR_SIZE);
11722 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11723 (*ranges_by_label)[lab_idx].end,
11724 NULL);
11725 }
11726 }
11727 else
11728 {
11729 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11730 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11731 fmt = start_fmt;
11732 }
11733 }
11734 }
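/* Illustrative output of the loop above for a single block: in the
   single-text-section case the entry is a pair of DWARF2_ADDR_SIZE-byte
   deltas, block-begin-label minus text_section_label and block-end-label
   minus text_section_label, i.e. offsets from the CU base address
   (DW_AT_low_pc); with multiple function sections the pair is two
   absolute addresses instead.  Either way a pair of zero words ends the
   list.  */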
11735
11736 /* Non-zero if .debug_line_str should be used for .debug_line section
11737 strings or strings that are likely shareable with those. */
11738 #define DWARF5_USE_DEBUG_LINE_STR \
11739 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11740 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11741 /* FIXME: there is no .debug_line_str.dwo section, \
11742 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11743 && !dwarf_split_debug_info)
11744
11745 /* Assign .debug_rnglists indexes. */
11746
11747 static void
11748 index_rnglists (void)
11749 {
11750 unsigned i;
11751 dw_ranges *r;
11752
11753 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11754 if (r->label)
11755 r->idx = rnglist_idx++;
11756 }
11757
11758 /* Emit .debug_rnglists section. */
11759
11760 static void
11761 output_rnglists (unsigned generation)
11762 {
11763 unsigned i;
11764 dw_ranges *r;
11765 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11766 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11767 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11768
11769 switch_to_section (debug_ranges_section);
11770 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11771 /* There are up to 4 unique ranges labels per generation.
11772 See also init_sections_and_labels. */
11773 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11774 2 + generation * 4);
11775 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11776 3 + generation * 4);
11777 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11778 dw2_asm_output_data (4, 0xffffffff,
11779 "Initial length escape value indicating "
11780 "64-bit DWARF extension");
11781 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11782 "Length of Range Lists");
11783 ASM_OUTPUT_LABEL (asm_out_file, l1);
11784 output_dwarf_version ();
11785 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11786 dw2_asm_output_data (1, 0, "Segment Size");
11787 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11788 about relocation sizes and primarily care about the size of .debug*
11789 sections in linked shared libraries and executables, then
11790 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11791 into it are usually larger than just DW_FORM_sec_offset offsets
11792 into the .debug_rnglists section. */
11793 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11794 "Offset Entry Count");
11795 if (dwarf_split_debug_info)
11796 {
11797 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11798 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11799 if (r->label)
11800 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11801 ranges_base_label, NULL);
11802 }
11803
11804 const char *lab = "";
11805 unsigned int len = vec_safe_length (ranges_table);
11806 const char *base = NULL;
11807 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11808 {
11809 int block_num = r->num;
11810
11811 if (r->label)
11812 {
11813 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11814 lab = r->label;
11815 }
11816 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11817 base = NULL;
11818 if (block_num > 0)
11819 {
11820 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11821 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11822
11823 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11824 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11825
11826 if (HAVE_AS_LEB128)
11827 {
11828 /* If all code is in the text section, then the compilation
11829 unit base address defaults to DW_AT_low_pc, which is the
11830 base of the text section. */
11831 if (!have_multiple_function_sections)
11832 {
11833 dw2_asm_output_data (1, DW_RLE_offset_pair,
11834 "DW_RLE_offset_pair (%s)", lab);
11835 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11836 "Range begin address (%s)", lab);
11837 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11838 "Range end address (%s)", lab);
11839 continue;
11840 }
11841 if (base == NULL)
11842 {
11843 dw_ranges *r2 = NULL;
11844 if (i < len - 1)
11845 r2 = &(*ranges_table)[i + 1];
11846 if (r2
11847 && r2->num != 0
11848 && r2->label == NULL
11849 && !r2->maybe_new_sec)
11850 {
11851 dw2_asm_output_data (1, DW_RLE_base_address,
11852 "DW_RLE_base_address (%s)", lab);
11853 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11854 "Base address (%s)", lab);
11855 strcpy (basebuf, blabel);
11856 base = basebuf;
11857 }
11858 }
11859 if (base)
11860 {
11861 dw2_asm_output_data (1, DW_RLE_offset_pair,
11862 "DW_RLE_offset_pair (%s)", lab);
11863 dw2_asm_output_delta_uleb128 (blabel, base,
11864 "Range begin address (%s)", lab);
11865 dw2_asm_output_delta_uleb128 (elabel, base,
11866 "Range end address (%s)", lab);
11867 continue;
11868 }
11869 dw2_asm_output_data (1, DW_RLE_start_length,
11870 "DW_RLE_start_length (%s)", lab);
11871 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11872 "Range begin address (%s)", lab);
11873 dw2_asm_output_delta_uleb128 (elabel, blabel,
11874 "Range length (%s)", lab);
11875 }
11876 else
11877 {
11878 dw2_asm_output_data (1, DW_RLE_start_end,
11879 "DW_RLE_start_end (%s)", lab);
11880 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11881 "Range begin address (%s)", lab);
11882 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11883 "Range end address (%s)", lab);
11884 }
11885 }
11886
11887 /* Negative block_num stands for an index into ranges_by_label. */
11888 else if (block_num < 0)
11889 {
11890 int lab_idx = - block_num - 1;
11891 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11892 const char *elabel = (*ranges_by_label)[lab_idx].end;
11893
11894 if (!have_multiple_function_sections)
11895 gcc_unreachable ();
11896 if (HAVE_AS_LEB128)
11897 {
11898 dw2_asm_output_data (1, DW_RLE_start_length,
11899 "DW_RLE_start_length (%s)", lab);
11900 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11901 "Range begin address (%s)", lab);
11902 dw2_asm_output_delta_uleb128 (elabel, blabel,
11903 "Range length (%s)", lab);
11904 }
11905 else
11906 {
11907 dw2_asm_output_data (1, DW_RLE_start_end,
11908 "DW_RLE_start_end (%s)", lab);
11909 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11910 "Range begin address (%s)", lab);
11911 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11912 "Range end address (%s)", lab);
11913 }
11914 }
11915 else
11916 dw2_asm_output_data (1, DW_RLE_end_of_list,
11917 "DW_RLE_end_of_list (%s)", lab);
11918 }
11919 ASM_OUTPUT_LABEL (asm_out_file, l2);
11920 }
11921
11922 /* Data structure containing information about input files. */
11923 struct file_info
11924 {
11925 const char *path; /* Complete file name. */
11926 const char *fname; /* File name part. */
11927 int length; /* Length of entire string. */
11928 struct dwarf_file_data * file_idx; /* Index in input file table. */
11929 int dir_idx; /* Index in directory table. */
11930 };
11931
11932 /* Data structure containing information about directories with source
11933 files. */
11934 struct dir_info
11935 {
11936 const char *path; /* Path including directory name. */
11937 int length; /* Path length. */
11938 int prefix; /* Index of directory entry which is a prefix. */
11939 int count; /* Number of files in this directory. */
11940 int dir_idx; /* Index of directory used as base. */
11941 };
11942
11943 /* Callback function for file_info comparison. We sort by looking at
11944 the directories in the path. */
11945
11946 static int
11947 file_info_cmp (const void *p1, const void *p2)
11948 {
11949 const struct file_info *const s1 = (const struct file_info *) p1;
11950 const struct file_info *const s2 = (const struct file_info *) p2;
11951 const unsigned char *cp1;
11952 const unsigned char *cp2;
11953
11954 /* Take care of file names without directories. We need to make sure that
11955 we return consistent values to qsort since some implementations will get
11956 confused if we return the same value when identical operands are passed in
11957 opposite orders. So if neither has a directory, return 0 and otherwise
11958 return 1 or -1 depending on which one has the directory. */
11959 if ((s1->path == s1->fname || s2->path == s2->fname))
11960 return (s2->path == s2->fname) - (s1->path == s1->fname);
11961
11962 cp1 = (const unsigned char *) s1->path;
11963 cp2 = (const unsigned char *) s2->path;
11964
11965 while (1)
11966 {
11967 ++cp1;
11968 ++cp2;
11969 /* Reached the end of the first path? If so, handle like above. */
11970 if ((cp1 == (const unsigned char *) s1->fname)
11971 || (cp2 == (const unsigned char *) s2->fname))
11972 return ((cp2 == (const unsigned char *) s2->fname)
11973 - (cp1 == (const unsigned char *) s1->fname));
11974
11975 /* Does the character of the current path component differ? */
11976 else if (*cp1 != *cp2)
11977 return *cp1 - *cp2;
11978 }
11979 }
11980
11981 struct file_name_acquire_data
11982 {
11983 struct file_info *files;
11984 int used_files;
11985 int max_files;
11986 };
11987
11988 /* Traversal function for the hash table. */
11989
11990 int
11991 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
11992 {
11993 struct dwarf_file_data *d = *slot;
11994 struct file_info *fi;
11995 const char *f;
11996
11997 gcc_assert (fnad->max_files >= d->emitted_number);
11998
11999 if (! d->emitted_number)
12000 return 1;
12001
12002 gcc_assert (fnad->max_files != fnad->used_files);
12003
12004 fi = fnad->files + fnad->used_files++;
12005
12006 /* Skip all leading "./". */
12007 f = d->filename;
12008 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12009 f += 2;
12010
12011 /* Create a new array entry. */
12012 fi->path = f;
12013 fi->length = strlen (f);
12014 fi->file_idx = d;
12015
12016 /* Search for the file name part. */
12017 f = strrchr (f, DIR_SEPARATOR);
12018 #if defined (DIR_SEPARATOR_2)
12019 {
12020 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12021
12022 if (g != NULL)
12023 {
12024 if (f == NULL || f < g)
12025 f = g;
12026 }
12027 }
12028 #endif
12029
12030 fi->fname = f == NULL ? fi->path : f + 1;
12031 return 1;
12032 }
12033
12034 /* Helper function for output_file_names. Emit a FORM encoded
12035 string STR, labelling it in the assembly comments with ENTRY_KIND and
12036 index IDX. */
12037
12038 static void
12039 output_line_string (enum dwarf_form form, const char *str,
12040 const char *entry_kind, unsigned int idx)
12041 {
12042 switch (form)
12043 {
12044 case DW_FORM_string:
12045 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12046 break;
12047 case DW_FORM_line_strp:
12048 if (!debug_line_str_hash)
12049 debug_line_str_hash
12050 = hash_table<indirect_string_hasher>::create_ggc (10);
12051
12052 struct indirect_string_node *node;
12053 node = find_AT_string_in_table (str, debug_line_str_hash);
12054 set_indirect_string (node);
12055 node->form = form;
12056 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12057 debug_line_str_section, "%s: %#x: \"%s\"",
12058 entry_kind, 0, node->str);
12059 break;
12060 default:
12061 gcc_unreachable ();
12062 }
12063 }
12064
12065 /* Output the directory table and the file name table. We try to minimize
12066 the total amount of memory needed. A heuristic is used to avoid large
12067 slowdowns with many input files. */
12068
12069 static void
12070 output_file_names (void)
12071 {
12072 struct file_name_acquire_data fnad;
12073 int numfiles;
12074 struct file_info *files;
12075 struct dir_info *dirs;
12076 int *saved;
12077 int *savehere;
12078 int *backmap;
12079 int ndirs;
12080 int idx_offset;
12081 int i;
12082
12083 if (!last_emitted_file)
12084 {
12085 if (dwarf_version >= 5)
12086 {
12087 dw2_asm_output_data (1, 0, "Directory entry format count");
12088 dw2_asm_output_data_uleb128 (0, "Directories count");
12089 dw2_asm_output_data (1, 0, "File name entry format count");
12090 dw2_asm_output_data_uleb128 (0, "File names count");
12091 }
12092 else
12093 {
12094 dw2_asm_output_data (1, 0, "End directory table");
12095 dw2_asm_output_data (1, 0, "End file name table");
12096 }
12097 return;
12098 }
12099
12100 numfiles = last_emitted_file->emitted_number;
12101
12102 /* Allocate the various arrays we need. */
12103 files = XALLOCAVEC (struct file_info, numfiles);
12104 dirs = XALLOCAVEC (struct dir_info, numfiles);
12105
12106 fnad.files = files;
12107 fnad.used_files = 0;
12108 fnad.max_files = numfiles;
12109 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12110 gcc_assert (fnad.used_files == fnad.max_files);
12111
12112 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12113
12114 /* Find all the different directories used. */
12115 dirs[0].path = files[0].path;
12116 dirs[0].length = files[0].fname - files[0].path;
12117 dirs[0].prefix = -1;
12118 dirs[0].count = 1;
12119 dirs[0].dir_idx = 0;
12120 files[0].dir_idx = 0;
12121 ndirs = 1;
12122
12123 for (i = 1; i < numfiles; i++)
12124 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12125 && memcmp (dirs[ndirs - 1].path, files[i].path,
12126 dirs[ndirs - 1].length) == 0)
12127 {
12128 /* Same directory as last entry. */
12129 files[i].dir_idx = ndirs - 1;
12130 ++dirs[ndirs - 1].count;
12131 }
12132 else
12133 {
12134 int j;
12135
12136 /* This is a new directory. */
12137 dirs[ndirs].path = files[i].path;
12138 dirs[ndirs].length = files[i].fname - files[i].path;
12139 dirs[ndirs].count = 1;
12140 dirs[ndirs].dir_idx = ndirs;
12141 files[i].dir_idx = ndirs;
12142
12143 /* Search for a prefix. */
12144 dirs[ndirs].prefix = -1;
12145 for (j = 0; j < ndirs; j++)
12146 if (dirs[j].length < dirs[ndirs].length
12147 && dirs[j].length > 1
12148 && (dirs[ndirs].prefix == -1
12149 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12150 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12151 dirs[ndirs].prefix = j;
12152
12153 ++ndirs;
12154 }
12155
12156 /* Now to the actual work. We have to find a subset of the directories which
12157 allows expressing the file names using references to the directory table
12158 with the fewest characters. We do not do an exhaustive search
12159 where we would have to check out every combination of every single
12160 possible prefix. Instead we use a heuristic which provides nearly optimal
12161 results in most cases and is never far off. */
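/* A small worked example of the heuristic below (directory names are
   hypothetical): with directories "abc/" holding 1 file, "abc/def/"
   holding 3 files and "abc/def/ghi/" holding 2 files, picking "abc/def/"
   as a base saves strlen ("abc/def/") characters for each of the 5 files
   at or below it, which easily outweighs the cost of emitting the
   "abc/def/" entry itself, so those files are written relative to it.  */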
12162 saved = XALLOCAVEC (int, ndirs);
12163 savehere = XALLOCAVEC (int, ndirs);
12164
12165 memset (saved, '\0', ndirs * sizeof (saved[0]));
12166 for (i = 0; i < ndirs; i++)
12167 {
12168 int j;
12169 int total;
12170
12171 /* We can always save some space for the current directory. But this
12172 does not mean it will be enough to justify adding the directory. */
12173 savehere[i] = dirs[i].length;
12174 total = (savehere[i] - saved[i]) * dirs[i].count;
12175
12176 for (j = i + 1; j < ndirs; j++)
12177 {
12178 savehere[j] = 0;
12179 if (saved[j] < dirs[i].length)
12180 {
12181 /* Determine whether the dirs[i] path is a prefix of the
12182 dirs[j] path. */
12183 int k;
12184
12185 k = dirs[j].prefix;
12186 while (k != -1 && k != (int) i)
12187 k = dirs[k].prefix;
12188
12189 if (k == (int) i)
12190 {
12191 /* Yes it is. We can possibly save some memory by
12192 writing the filenames in dirs[j] relative to
12193 dirs[i]. */
12194 savehere[j] = dirs[i].length;
12195 total += (savehere[j] - saved[j]) * dirs[j].count;
12196 }
12197 }
12198 }
12199
12200 /* Check whether we can save enough to justify adding the dirs[i]
12201 directory. */
12202 if (total > dirs[i].length + 1)
12203 {
12204 /* It's worthwhile adding. */
12205 for (j = i; j < ndirs; j++)
12206 if (savehere[j] > 0)
12207 {
12208 /* Remember how much we saved for this directory so far. */
12209 saved[j] = savehere[j];
12210
12211 /* Remember the prefix directory. */
12212 dirs[j].dir_idx = i;
12213 }
12214 }
12215 }
12216
12217 /* Emit the directory name table. */
12218 idx_offset = dirs[0].length > 0 ? 1 : 0;
12219 enum dwarf_form str_form = DW_FORM_string;
12220 enum dwarf_form idx_form = DW_FORM_udata;
12221 if (dwarf_version >= 5)
12222 {
12223 const char *comp_dir = comp_dir_string ();
12224 if (comp_dir == NULL)
12225 comp_dir = "";
12226 dw2_asm_output_data (1, 1, "Directory entry format count");
12227 if (DWARF5_USE_DEBUG_LINE_STR)
12228 str_form = DW_FORM_line_strp;
12229 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12230 dw2_asm_output_data_uleb128 (str_form, "%s",
12231 get_DW_FORM_name (str_form));
12232 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12233 if (str_form == DW_FORM_string)
12234 {
12235 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12236 for (i = 1 - idx_offset; i < ndirs; i++)
12237 dw2_asm_output_nstring (dirs[i].path,
12238 dirs[i].length
12239 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12240 "Directory Entry: %#x", i + idx_offset);
12241 }
12242 else
12243 {
12244 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12245 for (i = 1 - idx_offset; i < ndirs; i++)
12246 {
12247 const char *str
12248 = ggc_alloc_string (dirs[i].path,
12249 dirs[i].length
12250 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12251 output_line_string (str_form, str, "Directory Entry",
12252 (unsigned) i + idx_offset);
12253 }
12254 }
12255 }
12256 else
12257 {
12258 for (i = 1 - idx_offset; i < ndirs; i++)
12259 dw2_asm_output_nstring (dirs[i].path,
12260 dirs[i].length
12261 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12262 "Directory Entry: %#x", i + idx_offset);
12263
12264 dw2_asm_output_data (1, 0, "End directory table");
12265 }
12266
12267 /* We have to emit them in the order of emitted_number since that's
12268 used in the debug info generation. To do this efficiently we
12269 generate a back-mapping of the indices first. */
12270 backmap = XALLOCAVEC (int, numfiles);
12271 for (i = 0; i < numfiles; i++)
12272 backmap[files[i].file_idx->emitted_number - 1] = i;
12273
12274 if (dwarf_version >= 5)
12275 {
12276 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12277 if (filename0 == NULL)
12278 filename0 = "";
12279 /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12280 DW_FORM_data2. Choose one based on the number of directories
12281 and how much space they would occupy in each encoding.
12282 If we have at most 256 directories, all indexes fit into
12283 a single byte, so DW_FORM_data1 is the most compact (if there
12284 are at most 128 directories, DW_FORM_udata would be just as
12285 compact, but no shorter and slower to decode). */
12286 if (ndirs + idx_offset <= 256)
12287 idx_form = DW_FORM_data1;
12288 /* If there are more than 65536 directories, we have to use
12289 DW_FORM_udata, since DW_FORM_data2 can't refer to them.
12290 Otherwise, compute how much space all the indexes would occupy
12291 if they used DW_FORM_udata (sum), compare that to the size of
12292 the DW_FORM_data2 encoding, and pick the more efficient one. */
12293 else if (ndirs + idx_offset <= 65536)
12294 {
12295 unsigned HOST_WIDE_INT sum = 1;
12296 for (i = 0; i < numfiles; i++)
12297 {
12298 int file_idx = backmap[i];
12299 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12300 sum += size_of_uleb128 (dir_idx);
12301 }
12302 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12303 idx_form = DW_FORM_data2;
12304 }
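/* A worked example of the comparison above (counts are hypothetical):
   with 1000 directories and 400 files, DW_FORM_data2 costs
   2 * (400 + 1) = 802 bytes for the directory-index column, while
   DW_FORM_udata costs 1 byte per index below 128 and 2 bytes per index
   up to 16383; if most files sit in low-numbered directories the uleb128
   sum stays below 802 and DW_FORM_udata is kept, otherwise DW_FORM_data2
   is chosen.  */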
12305 #ifdef VMS_DEBUGGING_INFO
12306 dw2_asm_output_data (1, 4, "File name entry format count");
12307 #else
12308 dw2_asm_output_data (1, 2, "File name entry format count");
12309 #endif
12310 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12311 dw2_asm_output_data_uleb128 (str_form, "%s",
12312 get_DW_FORM_name (str_form));
12313 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12314 "DW_LNCT_directory_index");
12315 dw2_asm_output_data_uleb128 (idx_form, "%s",
12316 get_DW_FORM_name (idx_form));
12317 #ifdef VMS_DEBUGGING_INFO
12318 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12319 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12320 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12321 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12322 #endif
12323 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12324
12325 output_line_string (str_form, filename0, "File Entry", 0);
12326
12327 /* Include directory index. */
12328 if (idx_form != DW_FORM_udata)
12329 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12330 0, NULL);
12331 else
12332 dw2_asm_output_data_uleb128 (0, NULL);
12333
12334 #ifdef VMS_DEBUGGING_INFO
12335 dw2_asm_output_data_uleb128 (0, NULL);
12336 dw2_asm_output_data_uleb128 (0, NULL);
12337 #endif
12338 }
12339
12340 /* Now write all the file names. */
12341 for (i = 0; i < numfiles; i++)
12342 {
12343 int file_idx = backmap[i];
12344 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12345
12346 #ifdef VMS_DEBUGGING_INFO
12347 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12348
12349 /* Setting these fields can lead to debugger miscomparisons,
12350 but VMS Debug requires them to be set correctly. */
12351
12352 int ver;
12353 long long cdt;
12354 long siz;
12355 int maxfilelen = (strlen (files[file_idx].path)
12356 + dirs[dir_idx].length
12357 + MAX_VMS_VERSION_LEN + 1);
12358 char *filebuf = XALLOCAVEC (char, maxfilelen);
12359
12360 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12361 snprintf (filebuf, maxfilelen, "%s;%d",
12362 files[file_idx].path + dirs[dir_idx].length, ver);
12363
12364 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12365
12366 /* Include directory index. */
12367 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12368 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12369 dir_idx + idx_offset, NULL);
12370 else
12371 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12372
12373 /* Modification time. */
12374 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12375 &cdt, 0, 0, 0) == 0)
12376 ? cdt : 0, NULL);
12377
12378 /* File length in bytes. */
12379 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12380 0, &siz, 0, 0) == 0)
12381 ? siz : 0, NULL);
12382 #else
12383 output_line_string (str_form,
12384 files[file_idx].path + dirs[dir_idx].length,
12385 "File Entry", (unsigned) i + 1);
12386
12387 /* Include directory index. */
12388 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12389 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12390 dir_idx + idx_offset, NULL);
12391 else
12392 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12393
12394 if (dwarf_version >= 5)
12395 continue;
12396
12397 /* Modification time. */
12398 dw2_asm_output_data_uleb128 (0, NULL);
12399
12400 /* File length in bytes. */
12401 dw2_asm_output_data_uleb128 (0, NULL);
12402 #endif /* VMS_DEBUGGING_INFO */
12403 }
12404
12405 if (dwarf_version < 5)
12406 dw2_asm_output_data (1, 0, "End file name table");
12407 }
12408
12409
12410 /* Output one line number table into the .debug_line section. */
12411
12412 static void
12413 output_one_line_info_table (dw_line_info_table *table)
12414 {
12415 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12416 unsigned int current_line = 1;
12417 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12418 dw_line_info_entry *ent, *prev_addr;
12419 size_t i;
12420 unsigned int view;
12421
12422 view = 0;
12423
12424 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12425 {
12426 switch (ent->opcode)
12427 {
12428 case LI_set_address:
12429 /* ??? Unfortunately, we have little choice here currently, and
12430 must always use the most general form. GCC does not know the
12431 address delta itself, so we can't use DW_LNS_advance_pc. Many
12432 ports do have length attributes which will give an upper bound
12433 on the address range. We could perhaps use length attributes
12434 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12435 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12436
12437 view = 0;
12438
12439 	  /* This can handle any delta.  This takes
12440 	     3 + DWARF2_ADDR_SIZE bytes.  */
12441 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12442 debug_variable_location_views
12443 ? ", reset view to 0" : "");
12444 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12445 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12446 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12447
12448 prev_addr = ent;
12449 break;
12450
12451 case LI_adv_address:
12452 {
12453 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12454 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12455 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12456
12457 view++;
12458
12459 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12460 dw2_asm_output_delta (2, line_label, prev_label,
12461 "from %s to %s", prev_label, line_label);
12462
12463 prev_addr = ent;
12464 break;
12465 }
12466
12467 case LI_set_line:
12468 if (ent->val == current_line)
12469 {
12470 /* We still need to start a new row, so output a copy insn. */
12471 dw2_asm_output_data (1, DW_LNS_copy,
12472 "copy line %u", current_line);
12473 }
12474 else
12475 {
12476 int line_offset = ent->val - current_line;
12477 int line_delta = line_offset - DWARF_LINE_BASE;
12478
12479 current_line = ent->val;
12480 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12481 {
12482 /* This can handle deltas from -10 to 234, using the current
12483 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12484 This takes 1 byte. */
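	      /* For example, with the current DWARF_LINE_BASE, advancing the
		 line number by 2 while leaving the address unchanged is the
		 single byte DWARF_LINE_OPCODE_BASE + (2 - DWARF_LINE_BASE):
		 the consumer decodes it as an operation advance of 0 and a
		 line increment of 2, and appends a row to the matrix.  */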
12485 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12486 "line %u", current_line);
12487 }
12488 else
12489 {
12490 /* This can handle any delta. This takes at least 4 bytes,
12491 depending on the value being encoded. */
12492 dw2_asm_output_data (1, DW_LNS_advance_line,
12493 "advance to line %u", current_line);
12494 dw2_asm_output_data_sleb128 (line_offset, NULL);
12495 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12496 }
12497 }
12498 break;
12499
12500 case LI_set_file:
12501 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12502 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12503 break;
12504
12505 case LI_set_column:
12506 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12507 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12508 break;
12509
12510 case LI_negate_stmt:
12511 current_is_stmt = !current_is_stmt;
12512 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12513 "is_stmt %d", current_is_stmt);
12514 break;
12515
12516 case LI_set_prologue_end:
12517 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12518 "set prologue end");
12519 break;
12520
12521 case LI_set_epilogue_begin:
12522 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12523 "set epilogue begin");
12524 break;
12525
12526 case LI_set_discriminator:
12527 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12528 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12529 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12530 dw2_asm_output_data_uleb128 (ent->val, NULL);
12531 break;
12532 }
12533 }
12534
12535 /* Emit debug info for the address of the end of the table. */
12536 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12537 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12538 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12539 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12540
12541 dw2_asm_output_data (1, 0, "end sequence");
12542 dw2_asm_output_data_uleb128 (1, NULL);
12543 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12544 }
12545
12546 /* Output the source line number correspondence information. This
12547 information goes into the .debug_line section. */
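/* A sketch of what is emitted below, in order: the unit length; the DWARF
   version; for DWARF 5, the address and segment selector sizes; the prologue
   length; the minimum instruction length; for DWARF 4 and later, the maximum
   operations per instruction; the default is_stmt flag; the line base, line
   range and opcode base used by special opcodes; the argument counts of the
   standard opcodes; and the directory and file tables emitted by
   output_file_names.  */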
12548
12549 static void
12550 output_line_info (bool prologue_only)
12551 {
12552 static unsigned int generation;
12553 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12554 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12555 bool saw_one = false;
12556 int opc;
12557
12558 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12559 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12560 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12561 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12562
12563 if (!XCOFF_DEBUGGING_INFO)
12564 {
12565 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12566 dw2_asm_output_data (4, 0xffffffff,
12567 "Initial length escape value indicating 64-bit DWARF extension");
12568 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12569 "Length of Source Line Info");
12570 }
12571
12572 ASM_OUTPUT_LABEL (asm_out_file, l1);
12573
12574 output_dwarf_version ();
12575 if (dwarf_version >= 5)
12576 {
12577 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12578 dw2_asm_output_data (1, 0, "Segment Size");
12579 }
12580 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12581 ASM_OUTPUT_LABEL (asm_out_file, p1);
12582
12583 /* Define the architecture-dependent minimum instruction length (in bytes).
12584 In this implementation of DWARF, this field is used for information
12585 purposes only. Since GCC generates assembly language, we have no
12586 a priori knowledge of how many instruction bytes are generated for each
12587 source line, and therefore can use only the DW_LNE_set_address and
12588 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12589 this as '1', which is "correct enough" for all architectures,
12590 and don't let the target override. */
12591 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12592
12593 if (dwarf_version >= 4)
12594 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12595 "Maximum Operations Per Instruction");
12596 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12597 "Default is_stmt_start flag");
12598 dw2_asm_output_data (1, DWARF_LINE_BASE,
12599 "Line Base Value (Special Opcodes)");
12600 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12601 "Line Range Value (Special Opcodes)");
12602 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12603 "Special Opcode Base");
12604
12605 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12606 {
12607 int n_op_args;
12608 switch (opc)
12609 {
12610 case DW_LNS_advance_pc:
12611 case DW_LNS_advance_line:
12612 case DW_LNS_set_file:
12613 case DW_LNS_set_column:
12614 case DW_LNS_fixed_advance_pc:
12615 case DW_LNS_set_isa:
12616 n_op_args = 1;
12617 break;
12618 default:
12619 n_op_args = 0;
12620 break;
12621 }
12622
12623 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12624 opc, n_op_args);
12625 }
12626
12627 /* Write out the information about the files we use. */
12628 output_file_names ();
12629 ASM_OUTPUT_LABEL (asm_out_file, p2);
12630 if (prologue_only)
12631 {
12632 /* Output the marker for the end of the line number info. */
12633 ASM_OUTPUT_LABEL (asm_out_file, l2);
12634 return;
12635 }
12636
12637 if (separate_line_info)
12638 {
12639 dw_line_info_table *table;
12640 size_t i;
12641
12642 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12643 if (table->in_use)
12644 {
12645 output_one_line_info_table (table);
12646 saw_one = true;
12647 }
12648 }
12649 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12650 {
12651 output_one_line_info_table (cold_text_section_line_info);
12652 saw_one = true;
12653 }
12654
12655 /* ??? Some Darwin linkers crash on a .debug_line section with no
12656 sequences. Further, merely a DW_LNE_end_sequence entry is not
12657 sufficient -- the address column must also be initialized.
12658 Make sure to output at least one set_address/end_sequence pair,
12659 choosing .text since that section is always present. */
12660 if (text_section_line_info->in_use || !saw_one)
12661 output_one_line_info_table (text_section_line_info);
12662
12663 /* Output the marker for the end of the line number info. */
12664 ASM_OUTPUT_LABEL (asm_out_file, l2);
12665 }
12666 \f
12667 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12668
12669 static inline bool
12670 need_endianity_attribute_p (bool reverse)
12671 {
12672 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12673 }
12674
12675 /* Given a pointer to a tree node for some base type, return a pointer to
12676 a DIE that describes the given type. REVERSE is true if the type is
12677 to be interpreted in the reverse storage order wrt the target order.
12678
12679 This routine must only be called for GCC type nodes that correspond to
12680 Dwarf base (fundamental) types. */
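/* For example, the C type "unsigned int" is described by a DW_TAG_base_type
   DIE whose DW_AT_encoding is DW_ATE_unsigned and whose DW_AT_byte_size is
   the size of the type (typically 4).  */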
12681
12682 static dw_die_ref
12683 base_type_die (tree type, bool reverse)
12684 {
12685 dw_die_ref base_type_result;
12686 enum dwarf_type encoding;
12687 bool fpt_used = false;
12688 struct fixed_point_type_info fpt_info;
12689 tree type_bias = NULL_TREE;
12690
12691 /* If this is a subtype that should not be emitted as a subrange type,
12692 use the base type. See subrange_type_for_debug_p. */
12693 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12694 type = TREE_TYPE (type);
12695
12696 switch (TREE_CODE (type))
12697 {
12698 case INTEGER_TYPE:
12699 if ((dwarf_version >= 4 || !dwarf_strict)
12700 && TYPE_NAME (type)
12701 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12702 && DECL_IS_BUILTIN (TYPE_NAME (type))
12703 && DECL_NAME (TYPE_NAME (type)))
12704 {
12705 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12706 if (strcmp (name, "char16_t") == 0
12707 || strcmp (name, "char32_t") == 0)
12708 {
12709 encoding = DW_ATE_UTF;
12710 break;
12711 }
12712 }
12713 if ((dwarf_version >= 3 || !dwarf_strict)
12714 && lang_hooks.types.get_fixed_point_type_info)
12715 {
12716 memset (&fpt_info, 0, sizeof (fpt_info));
12717 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12718 {
12719 fpt_used = true;
12720 encoding = ((TYPE_UNSIGNED (type))
12721 ? DW_ATE_unsigned_fixed
12722 : DW_ATE_signed_fixed);
12723 break;
12724 }
12725 }
12726 if (TYPE_STRING_FLAG (type))
12727 {
12728 if (TYPE_UNSIGNED (type))
12729 encoding = DW_ATE_unsigned_char;
12730 else
12731 encoding = DW_ATE_signed_char;
12732 }
12733 else if (TYPE_UNSIGNED (type))
12734 encoding = DW_ATE_unsigned;
12735 else
12736 encoding = DW_ATE_signed;
12737
12738 if (!dwarf_strict
12739 && lang_hooks.types.get_type_bias)
12740 type_bias = lang_hooks.types.get_type_bias (type);
12741 break;
12742
12743 case REAL_TYPE:
12744 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12745 {
12746 if (dwarf_version >= 3 || !dwarf_strict)
12747 encoding = DW_ATE_decimal_float;
12748 else
12749 encoding = DW_ATE_lo_user;
12750 }
12751 else
12752 encoding = DW_ATE_float;
12753 break;
12754
12755 case FIXED_POINT_TYPE:
12756 if (!(dwarf_version >= 3 || !dwarf_strict))
12757 encoding = DW_ATE_lo_user;
12758 else if (TYPE_UNSIGNED (type))
12759 encoding = DW_ATE_unsigned_fixed;
12760 else
12761 encoding = DW_ATE_signed_fixed;
12762 break;
12763
12764       /* Dwarf2 doesn't know anything about complex ints, so use
12765 	 a user-defined type for them.  */
12766 case COMPLEX_TYPE:
12767 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12768 encoding = DW_ATE_complex_float;
12769 else
12770 encoding = DW_ATE_lo_user;
12771 break;
12772
12773 case BOOLEAN_TYPE:
12774 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12775 encoding = DW_ATE_boolean;
12776 break;
12777
12778 default:
12779 /* No other TREE_CODEs are Dwarf fundamental types. */
12780 gcc_unreachable ();
12781 }
12782
12783 base_type_result = new_die_raw (DW_TAG_base_type);
12784
12785 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12786 int_size_in_bytes (type));
12787 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12788
12789 if (need_endianity_attribute_p (reverse))
12790 add_AT_unsigned (base_type_result, DW_AT_endianity,
12791 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12792
12793 add_alignment_attribute (base_type_result, type);
12794
12795 if (fpt_used)
12796 {
12797 switch (fpt_info.scale_factor_kind)
12798 {
12799 case fixed_point_scale_factor_binary:
12800 add_AT_int (base_type_result, DW_AT_binary_scale,
12801 fpt_info.scale_factor.binary);
12802 break;
12803
12804 case fixed_point_scale_factor_decimal:
12805 add_AT_int (base_type_result, DW_AT_decimal_scale,
12806 fpt_info.scale_factor.decimal);
12807 break;
12808
12809 case fixed_point_scale_factor_arbitrary:
12810 /* Arbitrary scale factors cannot be described in standard DWARF,
12811 yet. */
12812 if (!dwarf_strict)
12813 {
12814 /* Describe the scale factor as a rational constant. */
12815 const dw_die_ref scale_factor
12816 = new_die (DW_TAG_constant, comp_unit_die (), type);
12817
12818 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12819 fpt_info.scale_factor.arbitrary.numerator);
12820 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12821 fpt_info.scale_factor.arbitrary.denominator);
12822
12823 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12824 }
12825 break;
12826
12827 default:
12828 gcc_unreachable ();
12829 }
12830 }
12831
12832 if (type_bias)
12833 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12834 dw_scalar_form_constant
12835 | dw_scalar_form_exprloc
12836 | dw_scalar_form_reference,
12837 NULL);
12838
12839 return base_type_result;
12840 }
12841
12842 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12843 named 'auto' in its type: return true for it, false otherwise. */
12844
12845 static inline bool
12846 is_cxx_auto (tree type)
12847 {
12848 if (is_cxx ())
12849 {
12850 tree name = TYPE_IDENTIFIER (type);
12851 if (name == get_identifier ("auto")
12852 || name == get_identifier ("decltype(auto)"))
12853 return true;
12854 }
12855 return false;
12856 }
12857
12858 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12859 given input type is a Dwarf "fundamental" type. Otherwise return null. */
12860
12861 static inline int
12862 is_base_type (tree type)
12863 {
12864 switch (TREE_CODE (type))
12865 {
12866 case INTEGER_TYPE:
12867 case REAL_TYPE:
12868 case FIXED_POINT_TYPE:
12869 case COMPLEX_TYPE:
12870 case BOOLEAN_TYPE:
12871 case POINTER_BOUNDS_TYPE:
12872 return 1;
12873
12874 case VOID_TYPE:
12875 case ARRAY_TYPE:
12876 case RECORD_TYPE:
12877 case UNION_TYPE:
12878 case QUAL_UNION_TYPE:
12879 case ENUMERAL_TYPE:
12880 case FUNCTION_TYPE:
12881 case METHOD_TYPE:
12882 case POINTER_TYPE:
12883 case REFERENCE_TYPE:
12884 case NULLPTR_TYPE:
12885 case OFFSET_TYPE:
12886 case LANG_TYPE:
12887 case VECTOR_TYPE:
12888 return 0;
12889
12890 default:
12891 if (is_cxx_auto (type))
12892 return 0;
12893 gcc_unreachable ();
12894 }
12895
12896 return 0;
12897 }
12898
12899 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12900 node, return the size in bits for the type if it is a constant, or else
12901 return the alignment for the type if the type's size is not constant, or
12902 else return BITS_PER_WORD if the type actually turns out to be an
12903 ERROR_MARK node. */
12904
12905 static inline unsigned HOST_WIDE_INT
12906 simple_type_size_in_bits (const_tree type)
12907 {
12908 if (TREE_CODE (type) == ERROR_MARK)
12909 return BITS_PER_WORD;
12910 else if (TYPE_SIZE (type) == NULL_TREE)
12911 return 0;
12912 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12913 return tree_to_uhwi (TYPE_SIZE (type));
12914 else
12915 return TYPE_ALIGN (type);
12916 }
12917
12918 /* Similarly, but return an offset_int instead of UHWI. */
12919
12920 static inline offset_int
12921 offset_int_type_size_in_bits (const_tree type)
12922 {
12923 if (TREE_CODE (type) == ERROR_MARK)
12924 return BITS_PER_WORD;
12925 else if (TYPE_SIZE (type) == NULL_TREE)
12926 return 0;
12927 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12928 return wi::to_offset (TYPE_SIZE (type));
12929 else
12930 return TYPE_ALIGN (type);
12931 }
12932
12933 /* Given a pointer to a tree node for a subrange type, return a pointer
12934 to a DIE that describes the given type. */
12935
12936 static dw_die_ref
12937 subrange_type_die (tree type, tree low, tree high, tree bias,
12938 dw_die_ref context_die)
12939 {
12940 dw_die_ref subrange_die;
12941 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
12942
12943 if (context_die == NULL)
12944 context_die = comp_unit_die ();
12945
12946 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
12947
12948 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
12949 {
12950       /* The sizes of the subrange type and its base type do not match,
12951 	 so we need to generate a size attribute for the subrange type.  */
12952 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
12953 }
12954
12955 add_alignment_attribute (subrange_die, type);
12956
12957 if (low)
12958 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
12959 if (high)
12960 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
12961 if (bias && !dwarf_strict)
12962 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
12963 dw_scalar_form_constant
12964 | dw_scalar_form_exprloc
12965 | dw_scalar_form_reference,
12966 NULL);
12967
12968 return subrange_die;
12969 }
12970
12971 /* Returns the (const and/or volatile) cv_qualifiers associated with
12972 the decl node. This will normally be augmented with the
12973 cv_qualifiers of the underlying type in add_type_attribute. */
12974
12975 static int
12976 decl_quals (const_tree decl)
12977 {
12978 return ((TREE_READONLY (decl)
12979 /* The C++ front-end correctly marks reference-typed
12980 variables as readonly, but from a language (and debug
12981 info) standpoint they are not const-qualified. */
12982 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
12983 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
12984 | (TREE_THIS_VOLATILE (decl)
12985 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
12986 }
12987
12988 /* Determine the TYPE whose qualifiers match the largest strict subset
12989 of the given TYPE_QUALS, and return its qualifiers. Ignore all
12990 qualifiers outside QUAL_MASK. */
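/* For example, if TYPE is a const volatile variant and only a const variant
   of it exists, the const variant is the largest strict subset and
   TYPE_QUAL_CONST is returned; the caller then only needs to emit the
   missing volatile qualifier on top of the existing DIE.  */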
12991
12992 static int
12993 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
12994 {
12995 tree t;
12996 int best_rank = 0, best_qual = 0, max_rank;
12997
12998 type_quals &= qual_mask;
12999 max_rank = popcount_hwi (type_quals) - 1;
13000
13001 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13002 t = TYPE_NEXT_VARIANT (t))
13003 {
13004 int q = TYPE_QUALS (t) & qual_mask;
13005
13006 if ((q & type_quals) == q && q != type_quals
13007 && check_base_type (t, type))
13008 {
13009 int rank = popcount_hwi (q);
13010
13011 if (rank > best_rank)
13012 {
13013 best_rank = rank;
13014 best_qual = q;
13015 }
13016 }
13017 }
13018
13019 return best_qual;
13020 }
13021
13022 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13023 static const dwarf_qual_info_t dwarf_qual_info[] =
13024 {
13025 { TYPE_QUAL_CONST, DW_TAG_const_type },
13026 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13027 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13028 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13029 };
13030 static const unsigned int dwarf_qual_info_size
13031 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13032
13033 /* If DIE is a qualified DIE of some base DIE with the same parent,
13034 return the base DIE, otherwise return NULL. Set MASK to the
13035 qualifiers added compared to the returned DIE. */
13036
13037 static dw_die_ref
13038 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13039 {
13040 unsigned int i;
13041 for (i = 0; i < dwarf_qual_info_size; i++)
13042 if (die->die_tag == dwarf_qual_info[i].t)
13043 break;
13044 if (i == dwarf_qual_info_size)
13045 return NULL;
13046 if (vec_safe_length (die->die_attr) != 1)
13047 return NULL;
13048 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13049 if (type == NULL || type->die_parent != die->die_parent)
13050 return NULL;
13051 *mask |= dwarf_qual_info[i].q;
13052 if (depth)
13053 {
13054 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13055 if (ret)
13056 return ret;
13057 }
13058 return type;
13059 }
13060
13061 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13062 entry that chains the modifiers specified by CV_QUALS in front of the
13063 given type. REVERSE is true if the type is to be interpreted in the
13064 reverse storage order wrt the target order. */
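/* For example, for a "const volatile int" object this returns a
   DW_TAG_volatile_type DIE whose DW_AT_type refers to a DW_TAG_const_type
   DIE, which in turn refers to the DW_TAG_base_type DIE for "int".  */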
13065
13066 static dw_die_ref
13067 modified_type_die (tree type, int cv_quals, bool reverse,
13068 dw_die_ref context_die)
13069 {
13070 enum tree_code code = TREE_CODE (type);
13071 dw_die_ref mod_type_die;
13072 dw_die_ref sub_die = NULL;
13073 tree item_type = NULL;
13074 tree qualified_type;
13075 tree name, low, high;
13076 dw_die_ref mod_scope;
13077 /* Only these cv-qualifiers are currently handled. */
13078 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13079 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13080 ENCODE_QUAL_ADDR_SPACE(~0U));
13081 const bool reverse_base_type
13082 = need_endianity_attribute_p (reverse) && is_base_type (type);
13083
13084 if (code == ERROR_MARK)
13085 return NULL;
13086
13087 if (lang_hooks.types.get_debug_type)
13088 {
13089 tree debug_type = lang_hooks.types.get_debug_type (type);
13090
13091 if (debug_type != NULL_TREE && debug_type != type)
13092 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13093 }
13094
13095 cv_quals &= cv_qual_mask;
13096
13097 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13098    tag modifier (and not an attribute) that old consumers won't be able
13099    to handle.  */
13100 if (dwarf_version < 3)
13101 cv_quals &= ~TYPE_QUAL_RESTRICT;
13102
13103 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
13104 if (dwarf_version < 5)
13105 cv_quals &= ~TYPE_QUAL_ATOMIC;
13106
13107 /* See if we already have the appropriately qualified variant of
13108 this type. */
13109 qualified_type = get_qualified_type (type, cv_quals);
13110
13111 if (qualified_type == sizetype)
13112 {
13113 /* Try not to expose the internal sizetype type's name. */
13114 if (TYPE_NAME (qualified_type)
13115 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13116 {
13117 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13118
13119 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13120 && (TYPE_PRECISION (t)
13121 == TYPE_PRECISION (qualified_type))
13122 && (TYPE_UNSIGNED (t)
13123 == TYPE_UNSIGNED (qualified_type)));
13124 qualified_type = t;
13125 }
13126 else if (qualified_type == sizetype
13127 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13128 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13129 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13130 qualified_type = size_type_node;
13131 }
13132
13133 /* If we do, then we can just use its DIE, if it exists. */
13134 if (qualified_type)
13135 {
13136 mod_type_die = lookup_type_die (qualified_type);
13137
13138 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13139 dealt with specially: the DIE with the attribute, if it exists, is
13140 placed immediately after the regular DIE for the same base type. */
13141 if (mod_type_die
13142 && (!reverse_base_type
13143 || ((mod_type_die = mod_type_die->die_sib) != NULL
13144 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13145 return mod_type_die;
13146 }
13147
13148 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13149
13150 /* Handle C typedef types. */
13151 if (name
13152 && TREE_CODE (name) == TYPE_DECL
13153 && DECL_ORIGINAL_TYPE (name)
13154 && !DECL_ARTIFICIAL (name))
13155 {
13156 tree dtype = TREE_TYPE (name);
13157
13158 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13159 if (qualified_type == dtype && !reverse_base_type)
13160 {
13161 tree origin = decl_ultimate_origin (name);
13162
13163 /* Typedef variants that have an abstract origin don't get their own
13164 type DIE (see gen_typedef_die), so fall back on the ultimate
13165 abstract origin instead. */
13166 if (origin != NULL && origin != name)
13167 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13168 context_die);
13169
13170 /* For a named type, use the typedef. */
13171 gen_type_die (qualified_type, context_die);
13172 return lookup_type_die (qualified_type);
13173 }
13174 else
13175 {
13176 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13177 dquals &= cv_qual_mask;
13178 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13179 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13180 /* cv-unqualified version of named type. Just use
13181 the unnamed type to which it refers. */
13182 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13183 reverse, context_die);
13184 /* Else cv-qualified version of named type; fall through. */
13185 }
13186 }
13187
13188 mod_scope = scope_die_for (type, context_die);
13189
13190 if (cv_quals)
13191 {
13192 int sub_quals = 0, first_quals = 0;
13193 unsigned i;
13194 dw_die_ref first = NULL, last = NULL;
13195
13196 /* Determine a lesser qualified type that most closely matches
13197 this one. Then generate DW_TAG_* entries for the remaining
13198 qualifiers. */
13199 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13200 cv_qual_mask);
13201 if (sub_quals && use_debug_types)
13202 {
13203 bool needed = false;
13204 /* If emitting type units, make sure the order of qualifiers
13205 is canonical. Thus, start from unqualified type if
13206 an earlier qualifier is missing in sub_quals, but some later
13207 one is present there. */
13208 for (i = 0; i < dwarf_qual_info_size; i++)
13209 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13210 needed = true;
13211 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13212 {
13213 sub_quals = 0;
13214 break;
13215 }
13216 }
13217 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13218 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13219 {
13220 /* As not all intermediate qualified DIEs have corresponding
13221 tree types, ensure that qualified DIEs in the same scope
13222 as their DW_AT_type are emitted after their DW_AT_type,
13223 only with other qualified DIEs for the same type possibly
13224 in between them. Determine the range of such qualified
13225 DIEs now (first being the base type, last being corresponding
13226 last qualified DIE for it). */
13227 unsigned int count = 0;
13228 first = qualified_die_p (mod_type_die, &first_quals,
13229 dwarf_qual_info_size);
13230 if (first == NULL)
13231 first = mod_type_die;
13232 gcc_assert ((first_quals & ~sub_quals) == 0);
13233 for (count = 0, last = first;
13234 count < (1U << dwarf_qual_info_size);
13235 count++, last = last->die_sib)
13236 {
13237 int quals = 0;
13238 if (last == mod_scope->die_child)
13239 break;
13240 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13241 != first)
13242 break;
13243 }
13244 }
13245
13246 for (i = 0; i < dwarf_qual_info_size; i++)
13247 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13248 {
13249 dw_die_ref d;
13250 if (first && first != last)
13251 {
13252 for (d = first->die_sib; ; d = d->die_sib)
13253 {
13254 int quals = 0;
13255 qualified_die_p (d, &quals, dwarf_qual_info_size);
13256 if (quals == (first_quals | dwarf_qual_info[i].q))
13257 break;
13258 if (d == last)
13259 {
13260 d = NULL;
13261 break;
13262 }
13263 }
13264 if (d)
13265 {
13266 mod_type_die = d;
13267 continue;
13268 }
13269 }
13270 if (first)
13271 {
13272 d = new_die_raw (dwarf_qual_info[i].t);
13273 add_child_die_after (mod_scope, d, last);
13274 last = d;
13275 }
13276 else
13277 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13278 if (mod_type_die)
13279 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13280 mod_type_die = d;
13281 first_quals |= dwarf_qual_info[i].q;
13282 }
13283 }
13284 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13285 {
13286 dwarf_tag tag = DW_TAG_pointer_type;
13287 if (code == REFERENCE_TYPE)
13288 {
13289 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13290 tag = DW_TAG_rvalue_reference_type;
13291 else
13292 tag = DW_TAG_reference_type;
13293 }
13294 mod_type_die = new_die (tag, mod_scope, type);
13295
13296 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13297 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13298 add_alignment_attribute (mod_type_die, type);
13299 item_type = TREE_TYPE (type);
13300
13301 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13302 if (!ADDR_SPACE_GENERIC_P (as))
13303 {
13304 int action = targetm.addr_space.debug (as);
13305 if (action >= 0)
13306 {
13307 /* Positive values indicate an address_class. */
13308 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13309 }
13310 else
13311 {
13312 /* Negative values indicate an (inverted) segment base reg. */
13313 dw_loc_descr_ref d
13314 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13315 add_AT_loc (mod_type_die, DW_AT_segment, d);
13316 }
13317 }
13318 }
13319 else if (code == INTEGER_TYPE
13320 && TREE_TYPE (type) != NULL_TREE
13321 && subrange_type_for_debug_p (type, &low, &high))
13322 {
13323 tree bias = NULL_TREE;
13324 if (lang_hooks.types.get_type_bias)
13325 bias = lang_hooks.types.get_type_bias (type);
13326 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13327 item_type = TREE_TYPE (type);
13328 }
13329 else if (is_base_type (type))
13330 {
13331 mod_type_die = base_type_die (type, reverse);
13332
13333 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13334 if (reverse_base_type)
13335 {
13336 dw_die_ref after_die
13337 = modified_type_die (type, cv_quals, false, context_die);
13338 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13339 }
13340 else
13341 add_child_die (comp_unit_die (), mod_type_die);
13342
13343 add_pubtype (type, mod_type_die);
13344 }
13345 else
13346 {
13347 gen_type_die (type, context_die);
13348
13349 /* We have to get the type_main_variant here (and pass that to the
13350 `lookup_type_die' routine) because the ..._TYPE node we have
13351 might simply be a *copy* of some original type node (where the
13352 copy was created to help us keep track of typedef names) and
13353 that copy might have a different TYPE_UID from the original
13354 ..._TYPE node. */
13355 if (TREE_CODE (type) == FUNCTION_TYPE
13356 || TREE_CODE (type) == METHOD_TYPE)
13357 {
13358 /* For function/method types, can't just use type_main_variant here,
13359 because that can have different ref-qualifiers for C++,
13360 but try to canonicalize. */
13361 tree main = TYPE_MAIN_VARIANT (type);
13362 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13363 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13364 && check_base_type (t, main)
13365 && check_lang_type (t, type))
13366 return lookup_type_die (t);
13367 return lookup_type_die (type);
13368 }
13369 else if (TREE_CODE (type) != VECTOR_TYPE
13370 && TREE_CODE (type) != ARRAY_TYPE)
13371 return lookup_type_die (type_main_variant (type));
13372 else
13373 /* Vectors have the debugging information in the type,
13374 not the main variant. */
13375 return lookup_type_die (type);
13376 }
13377
13378 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13379 don't output a DW_TAG_typedef, since there isn't one in the
13380 user's program; just attach a DW_AT_name to the type.
13381 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13382 if the base type already has the same name. */
13383 if (name
13384 && ((TREE_CODE (name) != TYPE_DECL
13385 && (qualified_type == TYPE_MAIN_VARIANT (type)
13386 || (cv_quals == TYPE_UNQUALIFIED)))
13387 || (TREE_CODE (name) == TYPE_DECL
13388 && TREE_TYPE (name) == qualified_type
13389 && DECL_NAME (name))))
13390 {
13391 if (TREE_CODE (name) == TYPE_DECL)
13392 /* Could just call add_name_and_src_coords_attributes here,
13393 but since this is a builtin type it doesn't have any
13394 useful source coordinates anyway. */
13395 name = DECL_NAME (name);
13396 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13397 }
13398 /* This probably indicates a bug. */
13399 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13400 {
13401 name = TYPE_IDENTIFIER (type);
13402 add_name_attribute (mod_type_die,
13403 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13404 }
13405
13406 if (qualified_type && !reverse_base_type)
13407 equate_type_number_to_die (qualified_type, mod_type_die);
13408
13409 if (item_type)
13410 /* We must do this after the equate_type_number_to_die call, in case
13411 this is a recursive type. This ensures that the modified_type_die
13412 recursion will terminate even if the type is recursive. Recursive
13413 types are possible in Ada. */
13414 sub_die = modified_type_die (item_type,
13415 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13416 reverse,
13417 context_die);
13418
13419 if (sub_die != NULL)
13420 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13421
13422 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13423 if (TYPE_ARTIFICIAL (type))
13424 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13425
13426 return mod_type_die;
13427 }
13428
13429 /* Generate DIEs for the generic parameters of T.
13430 T must be either a generic type or a generic function.
13431 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13432
13433 static void
13434 gen_generic_params_dies (tree t)
13435 {
13436 tree parms, args;
13437 int parms_num, i;
13438 dw_die_ref die = NULL;
13439 int non_default;
13440
13441 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13442 return;
13443
13444 if (TYPE_P (t))
13445 die = lookup_type_die (t);
13446 else if (DECL_P (t))
13447 die = lookup_decl_die (t);
13448
13449 gcc_assert (die);
13450
13451 parms = lang_hooks.get_innermost_generic_parms (t);
13452 if (!parms)
13453     /* T has no generic parameters.  It means T is neither a generic type
13454        nor a generic function.  End of story.  */
13455 return;
13456
13457 parms_num = TREE_VEC_LENGTH (parms);
13458 args = lang_hooks.get_innermost_generic_args (t);
13459 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13460 non_default = int_cst_value (TREE_CHAIN (args));
13461 else
13462 non_default = TREE_VEC_LENGTH (args);
13463 for (i = 0; i < parms_num; i++)
13464 {
13465 tree parm, arg, arg_pack_elems;
13466 dw_die_ref parm_die;
13467
13468 parm = TREE_VEC_ELT (parms, i);
13469 arg = TREE_VEC_ELT (args, i);
13470 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13471 gcc_assert (parm && TREE_VALUE (parm) && arg);
13472
13473 if (parm && TREE_VALUE (parm) && arg)
13474 {
13475 /* If PARM represents a template parameter pack,
13476 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13477 by DW_TAG_template_*_parameter DIEs for the argument
13478 pack elements of ARG. Note that ARG would then be
13479 an argument pack. */
13480 if (arg_pack_elems)
13481 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13482 arg_pack_elems,
13483 die);
13484 else
13485 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13486 true /* emit name */, die);
13487 if (i >= non_default)
13488 add_AT_flag (parm_die, DW_AT_default_value, 1);
13489 }
13490 }
13491 }
13492
13493 /* Create and return a DIE for PARM which should be
13494 the representation of a generic type parameter.
13495 For instance, in the C++ front end, PARM would be a template parameter.
13496 ARG is the argument to PARM.
13497    EMIT_NAME_P, if true, means the DIE will have a DW_AT_name attribute set
13498    to the name of PARM.
13499 PARENT_DIE is the parent DIE which the new created DIE should be added to,
13500 as a child node. */
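/* As an illustration, a type parameter such as T in "template <typename T>"
   yields a DW_TAG_template_type_param DIE, while a non-type parameter such
   as N in "template <int N>" yields a DW_TAG_template_value_param DIE, each
   added as a child of PARENT_DIE.  */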
13501
13502 static dw_die_ref
13503 generic_parameter_die (tree parm, tree arg,
13504 bool emit_name_p,
13505 dw_die_ref parent_die)
13506 {
13507 dw_die_ref tmpl_die = NULL;
13508 const char *name = NULL;
13509
13510 if (!parm || !DECL_NAME (parm) || !arg)
13511 return NULL;
13512
13513 /* We support non-type generic parameters and arguments,
13514 type generic parameters and arguments, as well as
13515 generic generic parameters (a.k.a. template template parameters in C++)
13516 and arguments. */
13517 if (TREE_CODE (parm) == PARM_DECL)
13518 /* PARM is a nontype generic parameter */
13519 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13520 else if (TREE_CODE (parm) == TYPE_DECL)
13521 /* PARM is a type generic parameter. */
13522 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13523 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13524 /* PARM is a generic generic parameter.
13525 Its DIE is a GNU extension. It shall have a
13526 DW_AT_name attribute to represent the name of the template template
13527 parameter, and a DW_AT_GNU_template_name attribute to represent the
13528 name of the template template argument. */
13529 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13530 parent_die, parm);
13531 else
13532 gcc_unreachable ();
13533
13534 if (tmpl_die)
13535 {
13536 tree tmpl_type;
13537
13538 /* If PARM is a generic parameter pack, it means we are
13539 emitting debug info for a template argument pack element.
13540 In other terms, ARG is a template argument pack element.
13541 In that case, we don't emit any DW_AT_name attribute for
13542 the die. */
13543 if (emit_name_p)
13544 {
13545 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13546 gcc_assert (name);
13547 add_AT_string (tmpl_die, DW_AT_name, name);
13548 }
13549
13550 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13551 {
13552 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13553 TMPL_DIE should have a child DW_AT_type attribute that is set
13554 to the type of the argument to PARM, which is ARG.
13555 If PARM is a type generic parameter, TMPL_DIE should have a
13556 child DW_AT_type that is set to ARG. */
13557 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13558 add_type_attribute (tmpl_die, tmpl_type,
13559 (TREE_THIS_VOLATILE (tmpl_type)
13560 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13561 false, parent_die);
13562 }
13563 else
13564 {
13565 	  /* So TMPL_DIE is a DIE representing a generic generic template
13566 	     parameter, a.k.a. a template template parameter in C++, and ARG
13567 	     is a template.  */
13568
13569 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13570 to the name of the argument. */
13571 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13572 if (name)
13573 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13574 }
13575
13576 if (TREE_CODE (parm) == PARM_DECL)
13577 	/* So PARM is a non-type generic parameter.
13578 	   DWARF3 5.6.8 says we must set a DW_AT_const_value child
13579 	   attribute of TMPL_DIE whose value represents the value
13580 	   of ARG.
13581 	   We must be careful here:
13582 	   the value of ARG might reference some function decls.
13583 	   We might currently be emitting debug info for a generic
13584 	   type, and since types are emitted before function decls, we
13585 	   don't know whether the function decls referenced by ARG will
13586 	   actually be emitted once the cgraph computations are done.
13587 	   So we must defer the generation of the DW_AT_const_value
13588 	   until after cgraph is ready.  */
13589 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13590 }
13591
13592 return tmpl_die;
13593 }
13594
13595 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13596    PARM_PACK, which must be a template parameter pack.  The returned DIE
13597    will be a child DIE of PARENT_DIE.  */
13598
13599 static dw_die_ref
13600 template_parameter_pack_die (tree parm_pack,
13601 tree parm_pack_args,
13602 dw_die_ref parent_die)
13603 {
13604 dw_die_ref die;
13605 int j;
13606
13607 gcc_assert (parent_die && parm_pack);
13608
13609 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13610 add_name_and_src_coords_attributes (die, parm_pack);
13611 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13612 generic_parameter_die (parm_pack,
13613 TREE_VEC_ELT (parm_pack_args, j),
13614 false /* Don't emit DW_AT_name */,
13615 die);
13616 return die;
13617 }
13618
13619 /* Given a pointer to an arbitrary ..._TYPE tree node, return true if it is
13620 an enumerated type. */
13621
13622 static inline int
13623 type_is_enum (const_tree type)
13624 {
13625 return TREE_CODE (type) == ENUMERAL_TYPE;
13626 }
13627
13628 /* Return the DBX register number described by a given RTL node. */
13629
13630 static unsigned int
13631 dbx_reg_number (const_rtx rtl)
13632 {
13633 unsigned regno = REGNO (rtl);
13634
13635 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13636
13637 #ifdef LEAF_REG_REMAP
13638 if (crtl->uses_only_leaf_regs)
13639 {
13640 int leaf_reg = LEAF_REG_REMAP (regno);
13641 if (leaf_reg != -1)
13642 regno = (unsigned) leaf_reg;
13643 }
13644 #endif
13645
13646 regno = DBX_REGISTER_NUMBER (regno);
13647 gcc_assert (regno != INVALID_REGNUM);
13648 return regno;
13649 }
13650
13651 /* Optionally add a DW_OP_piece term to a location description expression.
13652    DW_OP_piece is only added if the location description expression doesn't
13653    already end with DW_OP_piece.  */
13654
13655 static void
13656 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13657 {
13658 dw_loc_descr_ref loc;
13659
13660 if (*list_head != NULL)
13661 {
13662 /* Find the end of the chain. */
13663 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13664 ;
13665
13666 if (loc->dw_loc_opc != DW_OP_piece)
13667 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13668 }
13669 }
13670
13671 /* Return a location descriptor that designates a machine register or
13672 zero if there is none. */
13673
13674 static dw_loc_descr_ref
13675 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13676 {
13677 rtx regs;
13678
13679 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13680 return 0;
13681
13682 /* We only use "frame base" when we're sure we're talking about the
13683 post-prologue local stack frame. We do this by *not* running
13684 register elimination until this point, and recognizing the special
13685 argument pointer and soft frame pointer rtx's.
13686 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13687 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13688 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13689 {
13690 dw_loc_descr_ref result = NULL;
13691
13692 if (dwarf_version >= 4 || !dwarf_strict)
13693 {
13694 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13695 initialized);
13696 if (result)
13697 add_loc_descr (&result,
13698 new_loc_descr (DW_OP_stack_value, 0, 0));
13699 }
13700 return result;
13701 }
13702
13703 regs = targetm.dwarf_register_span (rtl);
13704
13705 if (REG_NREGS (rtl) > 1 || regs)
13706 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13707 else
13708 {
13709 unsigned int dbx_regnum = dbx_reg_number (rtl);
13710 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13711 return 0;
13712 return one_reg_loc_descriptor (dbx_regnum, initialized);
13713 }
13714 }
13715
13716 /* Return a location descriptor that designates a machine register for
13717 a given hard register number. */
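/* For example, DWARF register 5 is described by the single byte DW_OP_reg5,
   while register 40 needs DW_OP_regx followed by the ULEB128 operand 40.  */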
13718
13719 static dw_loc_descr_ref
13720 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13721 {
13722 dw_loc_descr_ref reg_loc_descr;
13723
13724 if (regno <= 31)
13725 reg_loc_descr
13726 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13727 else
13728 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13729
13730 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13731 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13732
13733 return reg_loc_descr;
13734 }
13735
13736 /* Given an RTL of a register, return a location descriptor that
13737 designates a value that spans more than one register. */
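/* For example, a 16-byte value living in two consecutive 8-byte registers is
   described as DW_OP_regX DW_OP_piece 8 DW_OP_regY DW_OP_piece 8, where Y is
   the register following X.  */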
13738
13739 static dw_loc_descr_ref
13740 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13741 enum var_init_status initialized)
13742 {
13743 int size, i;
13744 dw_loc_descr_ref loc_result = NULL;
13745
13746 /* Simple, contiguous registers. */
13747 if (regs == NULL_RTX)
13748 {
13749 unsigned reg = REGNO (rtl);
13750 int nregs;
13751
13752 #ifdef LEAF_REG_REMAP
13753 if (crtl->uses_only_leaf_regs)
13754 {
13755 int leaf_reg = LEAF_REG_REMAP (reg);
13756 if (leaf_reg != -1)
13757 reg = (unsigned) leaf_reg;
13758 }
13759 #endif
13760
13761 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13762 nregs = REG_NREGS (rtl);
13763
13764 /* At present we only track constant-sized pieces. */
13765 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13766 return NULL;
13767 size /= nregs;
13768
13769 loc_result = NULL;
13770 while (nregs--)
13771 {
13772 dw_loc_descr_ref t;
13773
13774 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13775 VAR_INIT_STATUS_INITIALIZED);
13776 add_loc_descr (&loc_result, t);
13777 add_loc_descr_op_piece (&loc_result, size);
13778 ++reg;
13779 }
13780 return loc_result;
13781 }
13782
13783   /* Now onto stupid register sets in non-contiguous locations.  */
13784
13785 gcc_assert (GET_CODE (regs) == PARALLEL);
13786
13787 /* At present we only track constant-sized pieces. */
13788 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13789 return NULL;
13790 loc_result = NULL;
13791
13792 for (i = 0; i < XVECLEN (regs, 0); ++i)
13793 {
13794 dw_loc_descr_ref t;
13795
13796 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13797 VAR_INIT_STATUS_INITIALIZED);
13798 add_loc_descr (&loc_result, t);
13799 add_loc_descr_op_piece (&loc_result, size);
13800 }
13801
13802 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13803 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13804 return loc_result;
13805 }
13806
13807 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13808
13809 /* Return a location descriptor that designates a constant i,
13810 as a compound operation from constant (i >> shift), constant shift
13811 and DW_OP_shl. */
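/* For example, the constant 0x80000000 can be emitted as
   DW_OP_lit16 DW_OP_lit27 DW_OP_shl (3 bytes, since 16 << 27 == 0x80000000)
   instead of DW_OP_const4u 0x80000000 (5 bytes).  */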
13812
13813 static dw_loc_descr_ref
13814 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13815 {
13816 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13817 add_loc_descr (&ret, int_loc_descriptor (shift));
13818 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13819 return ret;
13820 }
13821
13822 /* Return a location descriptor that designates constant POLY_I. */
13823
13824 static dw_loc_descr_ref
13825 int_loc_descriptor (poly_int64 poly_i)
13826 {
13827 enum dwarf_location_atom op;
13828
13829 HOST_WIDE_INT i;
13830 if (!poly_i.is_constant (&i))
13831 {
13832 /* Create location descriptions for the non-constant part and
13833 add any constant offset at the end. */
13834 dw_loc_descr_ref ret = NULL;
13835 HOST_WIDE_INT constant = poly_i.coeffs[0];
13836 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13837 {
13838 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13839 if (coeff != 0)
13840 {
13841 dw_loc_descr_ref start = ret;
13842 unsigned int factor;
13843 int bias;
13844 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13845 (j, &factor, &bias);
13846
13847 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13848 add COEFF * (REGNO / FACTOR) now and subtract
13849 COEFF * BIAS from the final constant part. */
13850 constant -= coeff * bias;
13851 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13852 if (coeff % factor == 0)
13853 coeff /= factor;
13854 else
13855 {
13856 int amount = exact_log2 (factor);
13857 gcc_assert (amount >= 0);
13858 add_loc_descr (&ret, int_loc_descriptor (amount));
13859 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13860 }
13861 if (coeff != 1)
13862 {
13863 add_loc_descr (&ret, int_loc_descriptor (coeff));
13864 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13865 }
13866 if (start)
13867 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13868 }
13869 }
13870 loc_descr_plus_const (&ret, constant);
13871 return ret;
13872 }
13873
13874 /* Pick the smallest representation of a constant, rather than just
13875 defaulting to the LEB encoding. */
13876 if (i >= 0)
13877 {
13878 int clz = clz_hwi (i);
13879 int ctz = ctz_hwi (i);
13880 if (i <= 31)
13881 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13882 else if (i <= 0xff)
13883 op = DW_OP_const1u;
13884 else if (i <= 0xffff)
13885 op = DW_OP_const2u;
13886 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13887 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13888 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13889 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13890 while DW_OP_const4u is 5 bytes. */
13891 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13892 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13893 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13894 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13895 while DW_OP_const4u is 5 bytes. */
13896 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13897
13898 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13899 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13900 <= 4)
13901 {
13902 /* As i >= 2**31, the double cast above will yield a negative number.
13903 Since wrapping is defined in DWARF expressions we can output big
13904 positive integers as small negative ones, regardless of the size
13905 of host wide ints.
13906
13907 Here, since the evaluator will handle 32-bit values and since i >=
13908 2**31, we know it's going to be interpreted as a negative literal:
13909 store it this way if we can do better than 5 bytes this way. */
13910 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13911 }
13912 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13913 op = DW_OP_const4u;
13914
13915 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13916 least 6 bytes: see if we can do better before falling back to it. */
13917 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13918 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13919 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13920 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13921 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13922 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13923 >= HOST_BITS_PER_WIDE_INT)
13924 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13925 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13926 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13927 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13928 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13929 && size_of_uleb128 (i) > 6)
13930 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13931 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13932 else
13933 op = DW_OP_constu;
13934 }
13935 else
13936 {
13937 if (i >= -0x80)
13938 op = DW_OP_const1s;
13939 else if (i >= -0x8000)
13940 op = DW_OP_const2s;
13941 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13942 {
13943 if (size_of_int_loc_descriptor (i) < 5)
13944 {
13945 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13946 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13947 return ret;
13948 }
13949 op = DW_OP_const4s;
13950 }
13951 else
13952 {
13953 if (size_of_int_loc_descriptor (i)
13954 < (unsigned long) 1 + size_of_sleb128 (i))
13955 {
13956 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13957 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13958 return ret;
13959 }
13960 op = DW_OP_consts;
13961 }
13962 }
13963
13964 return new_loc_descr (op, i, 0);
13965 }
13966
13967 /* Likewise, for unsigned constants. */
13968
13969 static dw_loc_descr_ref
13970 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
13971 {
13972 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
13973 const unsigned HOST_WIDE_INT max_uint
13974 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
13975
13976 /* If possible, use the clever signed constants handling. */
13977 if (i <= max_int)
13978 return int_loc_descriptor ((HOST_WIDE_INT) i);
13979
13980 /* Here, we are left with positive numbers that cannot be represented as
13981 HOST_WIDE_INT, i.e.:
13982 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
13983
13984      Using a DW_OP_const4/8/./u operation to encode them consumes a lot of
13985      bytes, whereas it may be better to output a negative integer: thanks to
13986      integer wrapping, we know that:
13987          x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
13988            = x - 2 * (max (HOST_WIDE_INT) + 1)
13989 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
13990 small negative integers. Let's try that in cases it will clearly improve
13991 the encoding: there is no gain turning DW_OP_const4u into
13992 DW_OP_const4s. */
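  /* As an illustration, assuming a 32-bit HOST_WIDE_INT and a 4-byte DWARF
     address size, i = 0xfffffff0 would be emitted as DW_OP_const1s -16
     (2 bytes) rather than DW_OP_const4u (5 bytes), since -16 wraps to
     0xfffffff0 modulo 2**32.  */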
13993 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
13994 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
13995 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
13996 {
13997 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
13998
13999 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14000 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14001 const HOST_WIDE_INT second_shift
14002 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14003
14004 /* So we finally have:
14005 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14006 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14007 return int_loc_descriptor (second_shift);
14008 }
14009
14010 /* Last chance: fallback to a simple constant operation. */
14011 return new_loc_descr
14012 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14013 ? DW_OP_const4u
14014 : DW_OP_const8u,
14015 i, 0);
14016 }
14017
14018 /* Generate and return a location description that computes the unsigned
14019 comparison of the two stack top entries (a OP b where b is the top-most
14020 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14021 LE_EXPR, GT_EXPR or GE_EXPR. */
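/* For instance, for LT_EXPR the emitted expression is, schematically:

       DW_OP_over DW_OP_over DW_OP_xor DW_OP_lit0 DW_OP_lt DW_OP_bra L1
       DW_OP_lt DW_OP_skip L2
   L1: DW_OP_gt
   L2: DW_OP_nop

   i.e. use the signed comparison when the operands have the same sign and
   the flipped comparison otherwise.  */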
14022
14023 static dw_loc_descr_ref
14024 uint_comparison_loc_list (enum tree_code kind)
14025 {
14026 enum dwarf_location_atom op, flip_op;
14027 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14028
14029 switch (kind)
14030 {
14031 case LT_EXPR:
14032 op = DW_OP_lt;
14033 break;
14034 case LE_EXPR:
14035 op = DW_OP_le;
14036 break;
14037 case GT_EXPR:
14038 op = DW_OP_gt;
14039 break;
14040 case GE_EXPR:
14041 op = DW_OP_ge;
14042 break;
14043 default:
14044 gcc_unreachable ();
14045 }
14046
14047 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14048 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14049
14050 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14051 possible to perform unsigned comparisons: we just have to distinguish
14052 three cases:
14053
14054 1. when a and b have the same sign (as signed integers); then we should
14055 return: a OP(signed) b;
14056
14057 2. when a is a negative signed integer while b is a positive one, then a
14058 is a greater unsigned integer than b; likewise when a and b's roles
14059 are flipped.
14060
14061 So first, compare the sign of the two operands. */
14062 ret = new_loc_descr (DW_OP_over, 0, 0);
14063 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14064 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14065 /* If they have different signs (i.e. they have different sign bits), then
14066 the stack top value has now the sign bit set and thus it's smaller than
14067 zero. */
14068 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14069 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14070 add_loc_descr (&ret, bra_node);
14071
14072 /* We are in case 1. At this point, we know both operands have the same
14073      sign, so it's safe to use the built-in signed comparison.  */
14074 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14075 add_loc_descr (&ret, jmp_node);
14076
14077 /* We are in case 2. Here, we know both operands do not have the same sign,
14078 so we have to flip the signed comparison. */
14079 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14080 tmp = new_loc_descr (flip_op, 0, 0);
14081 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14082 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14083 add_loc_descr (&ret, tmp);
14084
14085 /* This dummy operation is necessary to make the two branches join. */
14086 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14087 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14088 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14089 add_loc_descr (&ret, tmp);
14090
14091 return ret;
14092 }
14093
14094 /* Likewise, but takes the location description lists (might be destructive on
14095 them). Return NULL if either is NULL or if concatenation fails. */
14096
14097 static dw_loc_list_ref
14098 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14099 enum tree_code kind)
14100 {
14101 if (left == NULL || right == NULL)
14102 return NULL;
14103
14104 add_loc_list (&left, right);
14105 if (left == NULL)
14106 return NULL;
14107
14108 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14109 return left;
14110 }
14111
14112 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14113 without actually allocating it. */
14114
14115 static unsigned long
14116 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14117 {
14118 return size_of_int_loc_descriptor (i >> shift)
14119 + size_of_int_loc_descriptor (shift)
14120 + 1;
14121 }
14122
14123 /* Return size_of_locs (int_loc_descriptor (i)) without
14124 actually allocating it. */
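/* A few illustrative data points (derived from the cases handled below, not
   exhaustive): 18 costs 1 byte (DW_OP_lit18), 0x1234 costs 3 bytes (a 1-byte
   opcode plus a 2-byte operand), and -70000 costs 5 bytes (a 1-byte opcode
   plus a 4-byte operand). */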
14125
14126 static unsigned long
14127 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14128 {
14129 unsigned long s;
14130
14131 if (i >= 0)
14132 {
14133 int clz, ctz;
14134 if (i <= 31)
14135 return 1;
14136 else if (i <= 0xff)
14137 return 2;
14138 else if (i <= 0xffff)
14139 return 3;
14140 clz = clz_hwi (i);
14141 ctz = ctz_hwi (i);
14142 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14143 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14144 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14145 - clz - 5);
14146 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14147 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14148 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14149 - clz - 8);
14150 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14151 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14152 <= 4)
14153 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14154 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14155 return 5;
14156 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14157 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14158 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14159 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14160 - clz - 8);
14161 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14162 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14163 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14164 - clz - 16);
14165 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14166 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14167 && s > 6)
14168 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14169 - clz - 32);
14170 else
14171 return 1 + s;
14172 }
14173 else
14174 {
14175 if (i >= -0x80)
14176 return 2;
14177 else if (i >= -0x8000)
14178 return 3;
14179 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14180 {
14181 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14182 {
14183 s = size_of_int_loc_descriptor (-i) + 1;
14184 if (s < 5)
14185 return s;
14186 }
14187 return 5;
14188 }
14189 else
14190 {
14191 unsigned long r = 1 + size_of_sleb128 (i);
14192 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14193 {
14194 s = size_of_int_loc_descriptor (-i) + 1;
14195 if (s < r)
14196 return s;
14197 }
14198 return r;
14199 }
14200 }
14201 }
14202
14203 /* Return a loc description representing the "address" of an integer value.
14204 This can appear only as a top-level expression. */
14205
14206 static dw_loc_descr_ref
14207 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14208 {
14209 int litsize;
14210 dw_loc_descr_ref loc_result = NULL;
14211
14212 if (!(dwarf_version >= 4 || !dwarf_strict))
14213 return NULL;
14214
14215 litsize = size_of_int_loc_descriptor (i);
14216 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14217 is more compact. For DW_OP_stack_value we need:
14218 litsize + 1 (DW_OP_stack_value)
14219 and for DW_OP_implicit_value:
14220 1 (DW_OP_implicit_value) + 1 (length) + size. */
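/* E.g. (illustrative only), for i == 5 and size == 4 with
   DWARF2_ADDR_SIZE >= 4: litsize is 1 (DW_OP_lit5), so the
   DW_OP_stack_value form costs 2 bytes versus 1 + 1 + 4 == 6 bytes
   for DW_OP_implicit_value and is preferred. */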
14221 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14222 {
14223 loc_result = int_loc_descriptor (i);
14224 add_loc_descr (&loc_result,
14225 new_loc_descr (DW_OP_stack_value, 0, 0));
14226 return loc_result;
14227 }
14228
14229 loc_result = new_loc_descr (DW_OP_implicit_value,
14230 size, 0);
14231 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14232 loc_result->dw_loc_oprnd2.v.val_int = i;
14233 return loc_result;
14234 }
14235
14236 /* Return a location descriptor that designates a base+offset location. */
14237
14238 static dw_loc_descr_ref
14239 based_loc_descr (rtx reg, poly_int64 offset,
14240 enum var_init_status initialized)
14241 {
14242 unsigned int regno;
14243 dw_loc_descr_ref result;
14244 dw_fde_ref fde = cfun->fde;
14245
14246 /* We only use "frame base" when we're sure we're talking about the
14247 post-prologue local stack frame. We do this by *not* running
14248 register elimination until this point, and recognizing the special
14249 argument pointer and soft frame pointer rtx's. */
14250 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14251 {
14252 rtx elim = (ira_use_lra_p
14253 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14254 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14255
14256 if (elim != reg)
14257 {
14258 elim = strip_offset_and_add (elim, &offset);
14259 gcc_assert ((SUPPORTS_STACK_ALIGNMENT
14260 && (elim == hard_frame_pointer_rtx
14261 || elim == stack_pointer_rtx))
14262 || elim == (frame_pointer_needed
14263 ? hard_frame_pointer_rtx
14264 : stack_pointer_rtx));
14265
14266 /* If drap register is used to align stack, use frame
14267 pointer + offset to access stack variables. If stack
14268 is aligned without drap, use stack pointer + offset to
14269 access stack variables. */
14270 if (crtl->stack_realign_tried
14271 && reg == frame_pointer_rtx)
14272 {
14273 int base_reg
14274 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14275 ? HARD_FRAME_POINTER_REGNUM
14276 : REGNO (elim));
14277 return new_reg_loc_descr (base_reg, offset);
14278 }
14279
14280 gcc_assert (frame_pointer_fb_offset_valid);
14281 offset += frame_pointer_fb_offset;
14282 HOST_WIDE_INT const_offset;
14283 if (offset.is_constant (&const_offset))
14284 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14285 else
14286 {
14287 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14288 loc_descr_plus_const (&ret, offset);
14289 return ret;
14290 }
14291 }
14292 }
14293
14294 regno = REGNO (reg);
14295 #ifdef LEAF_REG_REMAP
14296 if (crtl->uses_only_leaf_regs)
14297 {
14298 int leaf_reg = LEAF_REG_REMAP (regno);
14299 if (leaf_reg != -1)
14300 regno = (unsigned) leaf_reg;
14301 }
14302 #endif
14303 regno = DWARF_FRAME_REGNUM (regno);
14304
14305 HOST_WIDE_INT const_offset;
14306 if (!optimize && fde
14307 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14308 && offset.is_constant (&const_offset))
14309 {
14310 /* Use cfa+offset to represent the location of arguments passed
14311 on the stack when the drap register is used to align the stack.
14312 Only do this when not optimizing: for optimized code, var-tracking
14313 is supposed to track where the arguments live, and the register
14314 used as vdrap or drap in some spot might be used for something
14315 else in another part of the routine. */
14316 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14317 }
14318
14319 result = new_reg_loc_descr (regno, offset);
14320
14321 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14322 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14323
14324 return result;
14325 }
14326
14327 /* Return true if this RTL expression describes a base+offset calculation. */
14328
14329 static inline int
14330 is_based_loc (const_rtx rtl)
14331 {
14332 return (GET_CODE (rtl) == PLUS
14333 && ((REG_P (XEXP (rtl, 0))
14334 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14335 && CONST_INT_P (XEXP (rtl, 1)))));
14336 }
14337
14338 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14339 failed. */
14340
14341 static dw_loc_descr_ref
14342 tls_mem_loc_descriptor (rtx mem)
14343 {
14344 tree base;
14345 dw_loc_descr_ref loc_result;
14346
14347 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14348 return NULL;
14349
14350 base = get_base_address (MEM_EXPR (mem));
14351 if (base == NULL
14352 || !VAR_P (base)
14353 || !DECL_THREAD_LOCAL_P (base))
14354 return NULL;
14355
14356 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14357 if (loc_result == NULL)
14358 return NULL;
14359
14360 if (maybe_ne (MEM_OFFSET (mem), 0))
14361 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14362
14363 return loc_result;
14364 }
14365
14366 /* Output debug info about the reason why we failed to expand an expression
14367 as a DWARF expression. */
14368
14369 static void
14370 expansion_failed (tree expr, rtx rtl, char const *reason)
14371 {
14372 if (dump_file && (dump_flags & TDF_DETAILS))
14373 {
14374 fprintf (dump_file, "Failed to expand as dwarf: ");
14375 if (expr)
14376 print_generic_expr (dump_file, expr, dump_flags);
14377 if (rtl)
14378 {
14379 fprintf (dump_file, "\n");
14380 print_rtl (dump_file, rtl);
14381 }
14382 fprintf (dump_file, "\nReason: %s\n", reason);
14383 }
14384 }
14385
14386 /* Helper function for const_ok_for_output. */
14387
14388 static bool
14389 const_ok_for_output_1 (rtx rtl)
14390 {
14391 if (targetm.const_not_ok_for_debug_p (rtl))
14392 {
14393 if (GET_CODE (rtl) != UNSPEC)
14394 {
14395 expansion_failed (NULL_TREE, rtl,
14396 "Expression rejected for debug by the backend.\n");
14397 return false;
14398 }
14399
14400 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14401 the target hook doesn't explicitly allow it in debug info, assume
14402 we can't express it in the debug info. */
14403 /* Don't complain about TLS UNSPECs, those are just too hard to
14404 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14405 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14406 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14407 if (flag_checking
14408 && (XVECLEN (rtl, 0) == 0
14409 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14410 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14411 inform (current_function_decl
14412 ? DECL_SOURCE_LOCATION (current_function_decl)
14413 : UNKNOWN_LOCATION,
14414 #if NUM_UNSPEC_VALUES > 0
14415 "non-delegitimized UNSPEC %s (%d) found in variable location",
14416 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14417 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14418 XINT (rtl, 1));
14419 #else
14420 "non-delegitimized UNSPEC %d found in variable location",
14421 XINT (rtl, 1));
14422 #endif
14423 expansion_failed (NULL_TREE, rtl,
14424 "UNSPEC hasn't been delegitimized.\n");
14425 return false;
14426 }
14427
14428 if (CONST_POLY_INT_P (rtl))
14429 return false;
14430
14431 if (targetm.const_not_ok_for_debug_p (rtl))
14432 {
14433 expansion_failed (NULL_TREE, rtl,
14434 "Expression rejected for debug by the backend.\n");
14435 return false;
14436 }
14437
14438 /* FIXME: Refer to PR60655. It is possible for simplification
14439 of rtl expressions in var tracking to produce such expressions.
14440 We should really identify / validate expressions
14441 enclosed in CONST that can be handled by assemblers on various
14442 targets and only handle legitimate cases here. */
14443 switch (GET_CODE (rtl))
14444 {
14445 case SYMBOL_REF:
14446 break;
14447 case NOT:
14448 case NEG:
14449 return false;
14450 default:
14451 return true;
14452 }
14453
14454 if (CONSTANT_POOL_ADDRESS_P (rtl))
14455 {
14456 bool marked;
14457 get_pool_constant_mark (rtl, &marked);
14458 /* If all references to this pool constant were optimized away,
14459 it was not output and thus we can't represent it. */
14460 if (!marked)
14461 {
14462 expansion_failed (NULL_TREE, rtl,
14463 "Constant was removed from constant pool.\n");
14464 return false;
14465 }
14466 }
14467
14468 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14469 return false;
14470
14471 /* Avoid references to external symbols in debug info: on several targets
14472 the linker might even refuse to link when linking a shared library,
14473 and in many other cases the relocations for .debug_info/.debug_loc are
14474 dropped, so the address becomes zero anyway. Hidden symbols, which are
14475 guaranteed to be defined within the same shared library or executable, are fine. */
14476 if (SYMBOL_REF_EXTERNAL_P (rtl))
14477 {
14478 tree decl = SYMBOL_REF_DECL (rtl);
14479
14480 if (decl == NULL || !targetm.binds_local_p (decl))
14481 {
14482 expansion_failed (NULL_TREE, rtl,
14483 "Symbol not defined in current TU.\n");
14484 return false;
14485 }
14486 }
14487
14488 return true;
14489 }
14490
14491 /* Return true if constant RTL can be emitted in DW_OP_addr or
14492 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14493 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14494
14495 static bool
14496 const_ok_for_output (rtx rtl)
14497 {
14498 if (GET_CODE (rtl) == SYMBOL_REF)
14499 return const_ok_for_output_1 (rtl);
14500
14501 if (GET_CODE (rtl) == CONST)
14502 {
14503 subrtx_var_iterator::array_type array;
14504 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14505 if (!const_ok_for_output_1 (*iter))
14506 return false;
14507 return true;
14508 }
14509
14510 return true;
14511 }
14512
14513 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14514 if possible, NULL otherwise. */
14515
14516 static dw_die_ref
14517 base_type_for_mode (machine_mode mode, bool unsignedp)
14518 {
14519 dw_die_ref type_die;
14520 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14521
14522 if (type == NULL)
14523 return NULL;
14524 switch (TREE_CODE (type))
14525 {
14526 case INTEGER_TYPE:
14527 case REAL_TYPE:
14528 break;
14529 default:
14530 return NULL;
14531 }
14532 type_die = lookup_type_die (type);
14533 if (!type_die)
14534 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14535 comp_unit_die ());
14536 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14537 return NULL;
14538 return type_die;
14539 }
14540
14541 /* For an OP descriptor assumed to be in unsigned MODE, convert it to an
14542 unsigned type matching MODE or, if MODE is narrower than or as wide as
14543 DWARF2_ADDR_SIZE, to an untyped value. Return NULL if the conversion is
14544 not possible. */
14545
14546 static dw_loc_descr_ref
14547 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14548 {
14549 machine_mode outer_mode = mode;
14550 dw_die_ref type_die;
14551 dw_loc_descr_ref cvt;
14552
14553 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14554 {
14555 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14556 return op;
14557 }
14558 type_die = base_type_for_mode (outer_mode, 1);
14559 if (type_die == NULL)
14560 return NULL;
14561 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14562 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14563 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14564 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14565 add_loc_descr (&op, cvt);
14566 return op;
14567 }
14568
14569 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14570
14571 static dw_loc_descr_ref
14572 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14573 dw_loc_descr_ref op1)
14574 {
14575 dw_loc_descr_ref ret = op0;
14576 add_loc_descr (&ret, op1);
14577 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14578 if (STORE_FLAG_VALUE != 1)
14579 {
14580 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14581 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14582 }
14583 return ret;
14584 }
14585
14586 /* Subroutine of scompare_loc_descriptor for the case in which we're
14587 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14588 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14589
14590 static dw_loc_descr_ref
14591 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14592 scalar_int_mode op_mode,
14593 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14594 {
14595 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14596 dw_loc_descr_ref cvt;
14597
14598 if (type_die == NULL)
14599 return NULL;
14600 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14601 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14602 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14603 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14604 add_loc_descr (&op0, cvt);
14605 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14606 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14607 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14608 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14609 add_loc_descr (&op1, cvt);
14610 return compare_loc_descriptor (op, op0, op1);
14611 }
14612
14613 /* Subroutine of scompare_loc_descriptor for the case in which we're
14614 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14615 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14616
14617 static dw_loc_descr_ref
14618 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14619 scalar_int_mode op_mode,
14620 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14621 {
14622 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14623 /* For eq/ne, if the operands are known to be zero-extended,
14624 there is no need to do the fancy shifting up. */
14625 if (op == DW_OP_eq || op == DW_OP_ne)
14626 {
14627 dw_loc_descr_ref last0, last1;
14628 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14629 ;
14630 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14631 ;
14632 /* deref_size zero extends, and for constants we can check
14633 whether they are zero extended or not. */
14634 if (((last0->dw_loc_opc == DW_OP_deref_size
14635 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14636 || (CONST_INT_P (XEXP (rtl, 0))
14637 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14638 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14639 && ((last1->dw_loc_opc == DW_OP_deref_size
14640 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14641 || (CONST_INT_P (XEXP (rtl, 1))
14642 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14643 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14644 return compare_loc_descriptor (op, op0, op1);
14645
14646 /* EQ/NE comparison against constant in narrower type than
14647 DWARF2_ADDR_SIZE can be performed either as
14648 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14649 DW_OP_{eq,ne}
14650 or
14651 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14652 DW_OP_{eq,ne}. Pick whatever is shorter. */
14653 if (CONST_INT_P (XEXP (rtl, 1))
14654 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14655 && (size_of_int_loc_descriptor (shift) + 1
14656 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14657 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14658 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14659 & GET_MODE_MASK (op_mode))))
14660 {
14661 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14662 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14663 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14664 & GET_MODE_MASK (op_mode));
14665 return compare_loc_descriptor (op, op0, op1);
14666 }
14667 }
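/* Otherwise shift both operands left by
   (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT bits so that
   the narrow mode's sign bit becomes the address-sized sign bit; the
   full-width signed comparison of the shifted values then agrees with the
   narrow signed comparison. */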
14668 add_loc_descr (&op0, int_loc_descriptor (shift));
14669 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14670 if (CONST_INT_P (XEXP (rtl, 1)))
14671 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14672 else
14673 {
14674 add_loc_descr (&op1, int_loc_descriptor (shift));
14675 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14676 }
14677 return compare_loc_descriptor (op, op0, op1);
14678 }
14679
14680 /* Return location descriptor for signed comparison OP RTL. */
14681
14682 static dw_loc_descr_ref
14683 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14684 machine_mode mem_mode)
14685 {
14686 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14687 dw_loc_descr_ref op0, op1;
14688
14689 if (op_mode == VOIDmode)
14690 op_mode = GET_MODE (XEXP (rtl, 1));
14691 if (op_mode == VOIDmode)
14692 return NULL;
14693
14694 scalar_int_mode int_op_mode;
14695 if (dwarf_strict
14696 && dwarf_version < 5
14697 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14698 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14699 return NULL;
14700
14701 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14702 VAR_INIT_STATUS_INITIALIZED);
14703 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14704 VAR_INIT_STATUS_INITIALIZED);
14705
14706 if (op0 == NULL || op1 == NULL)
14707 return NULL;
14708
14709 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14710 {
14711 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14712 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14713
14714 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14715 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14716 }
14717 return compare_loc_descriptor (op, op0, op1);
14718 }
14719
14720 /* Return location descriptor for unsigned comparison OP RTL. */
14721
14722 static dw_loc_descr_ref
14723 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14724 machine_mode mem_mode)
14725 {
14726 dw_loc_descr_ref op0, op1;
14727
14728 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14729 if (test_op_mode == VOIDmode)
14730 test_op_mode = GET_MODE (XEXP (rtl, 1));
14731
14732 scalar_int_mode op_mode;
14733 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14734 return NULL;
14735
14736 if (dwarf_strict
14737 && dwarf_version < 5
14738 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14739 return NULL;
14740
14741 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14742 VAR_INIT_STATUS_INITIALIZED);
14743 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14744 VAR_INIT_STATUS_INITIALIZED);
14745
14746 if (op0 == NULL || op1 == NULL)
14747 return NULL;
14748
14749 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14750 {
14751 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14752 dw_loc_descr_ref last0, last1;
14753 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14754 ;
14755 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14756 ;
14757 if (CONST_INT_P (XEXP (rtl, 0)))
14758 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14759 /* deref_size zero extends, so no need to mask it again. */
14760 else if (last0->dw_loc_opc != DW_OP_deref_size
14761 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14762 {
14763 add_loc_descr (&op0, int_loc_descriptor (mask));
14764 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14765 }
14766 if (CONST_INT_P (XEXP (rtl, 1)))
14767 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14768 /* deref_size zero extends, so no need to mask it again. */
14769 else if (last1->dw_loc_opc != DW_OP_deref_size
14770 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14771 {
14772 add_loc_descr (&op1, int_loc_descriptor (mask));
14773 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14774 }
14775 }
14776 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14777 {
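/* Adding 2**(N-1), i.e. flipping the sign bit of both operands, turns the
   unsigned comparison into an equivalent signed one: the signed order of
   the biased values matches the unsigned order of the originals. */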
14778 HOST_WIDE_INT bias = 1;
14779 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14780 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14781 if (CONST_INT_P (XEXP (rtl, 1)))
14782 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14783 + INTVAL (XEXP (rtl, 1)));
14784 else
14785 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14786 bias, 0));
14787 }
14788 return compare_loc_descriptor (op, op0, op1);
14789 }
14790
14791 /* Return location descriptor for {U,S}{MIN,MAX}. */
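/* Roughly (illustrative, ignoring the mode-specific masking, biasing and
   conversions interleaved below), the sequence built here is:
     <op0> DW_OP_dup <op1> DW_OP_swap DW_OP_over DW_OP_lt-or-DW_OP_gt
     DW_OP_bra <L1> DW_OP_swap
     L1: DW_OP_drop
   which leaves on the stack whichever operand wins the comparison. */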
14792
14793 static dw_loc_descr_ref
14794 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14795 machine_mode mem_mode)
14796 {
14797 enum dwarf_location_atom op;
14798 dw_loc_descr_ref op0, op1, ret;
14799 dw_loc_descr_ref bra_node, drop_node;
14800
14801 scalar_int_mode int_mode;
14802 if (dwarf_strict
14803 && dwarf_version < 5
14804 && (!is_a <scalar_int_mode> (mode, &int_mode)
14805 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14806 return NULL;
14807
14808 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14809 VAR_INIT_STATUS_INITIALIZED);
14810 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14811 VAR_INIT_STATUS_INITIALIZED);
14812
14813 if (op0 == NULL || op1 == NULL)
14814 return NULL;
14815
14816 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14817 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14818 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14819 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14820 {
14821 /* Checked by the caller. */
14822 int_mode = as_a <scalar_int_mode> (mode);
14823 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14824 {
14825 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14826 add_loc_descr (&op0, int_loc_descriptor (mask));
14827 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14828 add_loc_descr (&op1, int_loc_descriptor (mask));
14829 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14830 }
14831 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14832 {
14833 HOST_WIDE_INT bias = 1;
14834 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14835 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14836 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14837 }
14838 }
14839 else if (is_a <scalar_int_mode> (mode, &int_mode)
14840 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14841 {
14842 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14843 add_loc_descr (&op0, int_loc_descriptor (shift));
14844 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14845 add_loc_descr (&op1, int_loc_descriptor (shift));
14846 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14847 }
14848 else if (is_a <scalar_int_mode> (mode, &int_mode)
14849 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14850 {
14851 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14852 dw_loc_descr_ref cvt;
14853 if (type_die == NULL)
14854 return NULL;
14855 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14856 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14857 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14858 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14859 add_loc_descr (&op0, cvt);
14860 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14861 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14862 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14863 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14864 add_loc_descr (&op1, cvt);
14865 }
14866
14867 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14868 op = DW_OP_lt;
14869 else
14870 op = DW_OP_gt;
14871 ret = op0;
14872 add_loc_descr (&ret, op1);
14873 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14874 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14875 add_loc_descr (&ret, bra_node);
14876 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14877 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14878 add_loc_descr (&ret, drop_node);
14879 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14880 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14881 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14882 && is_a <scalar_int_mode> (mode, &int_mode)
14883 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14884 ret = convert_descriptor_to_mode (int_mode, ret);
14885 return ret;
14886 }
14887
14888 /* Helper function for mem_loc_descriptor. Perform the OP binary op, but
14889 first convert both arguments to TYPE_DIE, and afterwards convert the
14890 result back to unsigned MODE. */
14891
14892 static dw_loc_descr_ref
14893 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14894 scalar_int_mode mode, machine_mode mem_mode)
14895 {
14896 dw_loc_descr_ref cvt, op0, op1;
14897
14898 if (type_die == NULL)
14899 return NULL;
14900 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14901 VAR_INIT_STATUS_INITIALIZED);
14902 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14903 VAR_INIT_STATUS_INITIALIZED);
14904 if (op0 == NULL || op1 == NULL)
14905 return NULL;
14906 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14907 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14908 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14909 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14910 add_loc_descr (&op0, cvt);
14911 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14912 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14913 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14914 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14915 add_loc_descr (&op1, cvt);
14916 add_loc_descr (&op0, op1);
14917 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14918 return convert_descriptor_to_mode (mode, op0);
14919 }
14920
14921 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
14922 const0 is DW_OP_lit0 or corresponding typed constant,
14923 const1 is DW_OP_lit1 or corresponding typed constant
14924 and constMSB is constant with just the MSB bit set
14925 for the mode):
14926 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14927 L1: const0 DW_OP_swap
14928 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
14929 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14930 L3: DW_OP_drop
14931 L4: DW_OP_nop
14932
14933 CTZ is similar:
14934 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14935 L1: const0 DW_OP_swap
14936 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14937 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14938 L3: DW_OP_drop
14939 L4: DW_OP_nop
14940
14941 FFS is similar:
14942 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
14943 L1: const1 DW_OP_swap
14944 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14945 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14946 L3: DW_OP_drop
14947 L4: DW_OP_nop */
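/* Worked example (illustrative): for the 32-bit input 0x10 the CLZ loop
   above shifts left and bumps the counter 27 times before the MSB test
   succeeds, leaving 27 on the stack; a zero input does not take the first
   DW_OP_bra branch and ends up pushing constV instead. */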
14948
14949 static dw_loc_descr_ref
14950 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
14951 machine_mode mem_mode)
14952 {
14953 dw_loc_descr_ref op0, ret, tmp;
14954 HOST_WIDE_INT valv;
14955 dw_loc_descr_ref l1jump, l1label;
14956 dw_loc_descr_ref l2jump, l2label;
14957 dw_loc_descr_ref l3jump, l3label;
14958 dw_loc_descr_ref l4jump, l4label;
14959 rtx msb;
14960
14961 if (GET_MODE (XEXP (rtl, 0)) != mode)
14962 return NULL;
14963
14964 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14965 VAR_INIT_STATUS_INITIALIZED);
14966 if (op0 == NULL)
14967 return NULL;
14968 ret = op0;
14969 if (GET_CODE (rtl) == CLZ)
14970 {
14971 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14972 valv = GET_MODE_BITSIZE (mode);
14973 }
14974 else if (GET_CODE (rtl) == FFS)
14975 valv = 0;
14976 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14977 valv = GET_MODE_BITSIZE (mode);
14978 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14979 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
14980 add_loc_descr (&ret, l1jump);
14981 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
14982 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
14983 VAR_INIT_STATUS_INITIALIZED);
14984 if (tmp == NULL)
14985 return NULL;
14986 add_loc_descr (&ret, tmp);
14987 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
14988 add_loc_descr (&ret, l4jump);
14989 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
14990 ? const1_rtx : const0_rtx,
14991 mode, mem_mode,
14992 VAR_INIT_STATUS_INITIALIZED);
14993 if (l1label == NULL)
14994 return NULL;
14995 add_loc_descr (&ret, l1label);
14996 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14997 l2label = new_loc_descr (DW_OP_dup, 0, 0);
14998 add_loc_descr (&ret, l2label);
14999 if (GET_CODE (rtl) != CLZ)
15000 msb = const1_rtx;
15001 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15002 msb = GEN_INT (HOST_WIDE_INT_1U
15003 << (GET_MODE_BITSIZE (mode) - 1));
15004 else
15005 msb = immed_wide_int_const
15006 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15007 GET_MODE_PRECISION (mode)), mode);
15008 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15009 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15010 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15011 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15012 else
15013 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15014 VAR_INIT_STATUS_INITIALIZED);
15015 if (tmp == NULL)
15016 return NULL;
15017 add_loc_descr (&ret, tmp);
15018 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15019 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15020 add_loc_descr (&ret, l3jump);
15021 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15022 VAR_INIT_STATUS_INITIALIZED);
15023 if (tmp == NULL)
15024 return NULL;
15025 add_loc_descr (&ret, tmp);
15026 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15027 ? DW_OP_shl : DW_OP_shr, 0, 0));
15028 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15029 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15030 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15031 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15032 add_loc_descr (&ret, l2jump);
15033 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15034 add_loc_descr (&ret, l3label);
15035 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15036 add_loc_descr (&ret, l4label);
15037 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15038 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15039 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15040 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15041 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15042 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15043 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15044 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15045 return ret;
15046 }
15047
15048 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15049 const1 is DW_OP_lit1 or corresponding typed constant):
15050 const0 DW_OP_swap
15051 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15052 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15053 L2: DW_OP_drop
15054
15055 PARITY is similar:
15056 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15057 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15058 L2: DW_OP_drop */
15059
15060 static dw_loc_descr_ref
15061 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15062 machine_mode mem_mode)
15063 {
15064 dw_loc_descr_ref op0, ret, tmp;
15065 dw_loc_descr_ref l1jump, l1label;
15066 dw_loc_descr_ref l2jump, l2label;
15067
15068 if (GET_MODE (XEXP (rtl, 0)) != mode)
15069 return NULL;
15070
15071 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15072 VAR_INIT_STATUS_INITIALIZED);
15073 if (op0 == NULL)
15074 return NULL;
15075 ret = op0;
15076 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15077 VAR_INIT_STATUS_INITIALIZED);
15078 if (tmp == NULL)
15079 return NULL;
15080 add_loc_descr (&ret, tmp);
15081 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15082 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15083 add_loc_descr (&ret, l1label);
15084 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15085 add_loc_descr (&ret, l2jump);
15086 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15087 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15088 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15089 VAR_INIT_STATUS_INITIALIZED);
15090 if (tmp == NULL)
15091 return NULL;
15092 add_loc_descr (&ret, tmp);
15093 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15094 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15095 ? DW_OP_plus : DW_OP_xor, 0, 0));
15096 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15097 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15098 VAR_INIT_STATUS_INITIALIZED);
15099 add_loc_descr (&ret, tmp);
15100 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15101 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15102 add_loc_descr (&ret, l1jump);
15103 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15104 add_loc_descr (&ret, l2label);
15105 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15106 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15107 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15108 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15109 return ret;
15110 }
15111
15112 /* BSWAP (constS is initial shift count, either 56 or 24):
15113 constS const0
15114 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15115 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15116 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15117 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15118 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
15119
15120 static dw_loc_descr_ref
15121 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15122 machine_mode mem_mode)
15123 {
15124 dw_loc_descr_ref op0, ret, tmp;
15125 dw_loc_descr_ref l1jump, l1label;
15126 dw_loc_descr_ref l2jump, l2label;
15127
15128 if (BITS_PER_UNIT != 8
15129 || (GET_MODE_BITSIZE (mode) != 32
15130 && GET_MODE_BITSIZE (mode) != 64))
15131 return NULL;
15132
15133 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15134 VAR_INIT_STATUS_INITIALIZED);
15135 if (op0 == NULL)
15136 return NULL;
15137
15138 ret = op0;
15139 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15140 mode, mem_mode,
15141 VAR_INIT_STATUS_INITIALIZED);
15142 if (tmp == NULL)
15143 return NULL;
15144 add_loc_descr (&ret, tmp);
15145 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15146 VAR_INIT_STATUS_INITIALIZED);
15147 if (tmp == NULL)
15148 return NULL;
15149 add_loc_descr (&ret, tmp);
15150 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15151 add_loc_descr (&ret, l1label);
15152 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15153 mode, mem_mode,
15154 VAR_INIT_STATUS_INITIALIZED);
15155 add_loc_descr (&ret, tmp);
15156 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15157 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15158 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15159 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15160 VAR_INIT_STATUS_INITIALIZED);
15161 if (tmp == NULL)
15162 return NULL;
15163 add_loc_descr (&ret, tmp);
15164 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15165 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15166 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15167 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15168 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15169 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15170 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15171 VAR_INIT_STATUS_INITIALIZED);
15172 add_loc_descr (&ret, tmp);
15173 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15174 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15175 add_loc_descr (&ret, l2jump);
15176 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15177 VAR_INIT_STATUS_INITIALIZED);
15178 add_loc_descr (&ret, tmp);
15179 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15180 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15181 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15182 add_loc_descr (&ret, l1jump);
15183 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15184 add_loc_descr (&ret, l2label);
15185 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15186 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15187 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15188 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15189 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15190 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15191 return ret;
15192 }
15193
15194 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15195 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15196 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15197 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15198
15199 ROTATERT is similar:
15200 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15201 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15202 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
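/* In other words (illustrative restatement of the sequences above): ROTATE
   computes (x << n) | (x >> (BITSIZE - n)) and ROTATERT computes
   (x >> n) | (x << (BITSIZE - n)), masking the intermediate values to the
   mode when the mode is narrower than the DWARF address size. */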
15203
15204 static dw_loc_descr_ref
15205 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15206 machine_mode mem_mode)
15207 {
15208 rtx rtlop1 = XEXP (rtl, 1);
15209 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15210 int i;
15211
15212 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15213 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15214 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15215 VAR_INIT_STATUS_INITIALIZED);
15216 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15217 VAR_INIT_STATUS_INITIALIZED);
15218 if (op0 == NULL || op1 == NULL)
15219 return NULL;
15220 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15221 for (i = 0; i < 2; i++)
15222 {
15223 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15224 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15225 mode, mem_mode,
15226 VAR_INIT_STATUS_INITIALIZED);
15227 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15228 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15229 ? DW_OP_const4u
15230 : HOST_BITS_PER_WIDE_INT == 64
15231 ? DW_OP_const8u : DW_OP_constu,
15232 GET_MODE_MASK (mode), 0);
15233 else
15234 mask[i] = NULL;
15235 if (mask[i] == NULL)
15236 return NULL;
15237 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15238 }
15239 ret = op0;
15240 add_loc_descr (&ret, op1);
15241 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15242 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15243 if (GET_CODE (rtl) == ROTATERT)
15244 {
15245 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15246 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15247 GET_MODE_BITSIZE (mode), 0));
15248 }
15249 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15250 if (mask[0] != NULL)
15251 add_loc_descr (&ret, mask[0]);
15252 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15253 if (mask[1] != NULL)
15254 {
15255 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15256 add_loc_descr (&ret, mask[1]);
15257 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15258 }
15259 if (GET_CODE (rtl) == ROTATE)
15260 {
15261 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15262 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15263 GET_MODE_BITSIZE (mode), 0));
15264 }
15265 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15266 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15267 return ret;
15268 }
15269
15270 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15271 for DEBUG_PARAMETER_REF RTL. */
15272
15273 static dw_loc_descr_ref
15274 parameter_ref_descriptor (rtx rtl)
15275 {
15276 dw_loc_descr_ref ret;
15277 dw_die_ref ref;
15278
15279 if (dwarf_strict)
15280 return NULL;
15281 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15282 /* With LTO during LTRANS we get the late DIE that refers to the early
15283 DIE, thus we add another indirection here. This seems to confuse
15284 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15285 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15286 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15287 if (ref)
15288 {
15289 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15290 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15291 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15292 }
15293 else
15294 {
15295 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15296 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15297 }
15298 return ret;
15299 }
15300
15301 /* The following routine converts the RTL for a variable or parameter
15302 (resident in memory) into an equivalent Dwarf representation of a
15303 mechanism for getting the address of that same variable onto the top of a
15304 hypothetical "address evaluation" stack.
15305
15306 When creating memory location descriptors, we are effectively transforming
15307 the RTL for a memory-resident object into its Dwarf postfix expression
15308 equivalent. This routine recursively descends an RTL tree, turning
15309 it into Dwarf postfix code as it goes.
15310
15311 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15312
15313 MEM_MODE is the mode of the memory reference, needed to handle some
15314 autoincrement addressing modes.
15315
15316 Return 0 if we can't represent the location. */
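/* For instance (illustrative only), address RTL such as
   (plus (reg fp) (const_int -8)) is typically rendered as a single
   DW_OP_fbreg or DW_OP_breg<n> operation with offset -8, while more
   involved RTL recurses through the cases below. */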
15317
15318 dw_loc_descr_ref
15319 mem_loc_descriptor (rtx rtl, machine_mode mode,
15320 machine_mode mem_mode,
15321 enum var_init_status initialized)
15322 {
15323 dw_loc_descr_ref mem_loc_result = NULL;
15324 enum dwarf_location_atom op;
15325 dw_loc_descr_ref op0, op1;
15326 rtx inner = NULL_RTX;
15327 poly_int64 offset;
15328
15329 if (mode == VOIDmode)
15330 mode = GET_MODE (rtl);
15331
15332 /* Note that for a dynamically sized array, the location we will generate a
15333 description of here will be the lowest numbered location which is
15334 actually within the array. That's *not* necessarily the same as the
15335 zeroth element of the array. */
15336
15337 rtl = targetm.delegitimize_address (rtl);
15338
15339 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15340 return NULL;
15341
15342 scalar_int_mode int_mode, inner_mode, op1_mode;
15343 switch (GET_CODE (rtl))
15344 {
15345 case POST_INC:
15346 case POST_DEC:
15347 case POST_MODIFY:
15348 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15349
15350 case SUBREG:
15351 /* The case of a subreg may arise when we have a local (register)
15352 variable or a formal (register) parameter which doesn't quite fill
15353 up an entire register. For now, just assume that it is
15354 legitimate to make the Dwarf info refer to the whole register which
15355 contains the given subreg. */
15356 if (!subreg_lowpart_p (rtl))
15357 break;
15358 inner = SUBREG_REG (rtl);
15359 /* FALLTHRU */
15360 case TRUNCATE:
15361 if (inner == NULL_RTX)
15362 inner = XEXP (rtl, 0);
15363 if (is_a <scalar_int_mode> (mode, &int_mode)
15364 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15365 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15366 #ifdef POINTERS_EXTEND_UNSIGNED
15367 || (int_mode == Pmode && mem_mode != VOIDmode)
15368 #endif
15369 )
15370 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15371 {
15372 mem_loc_result = mem_loc_descriptor (inner,
15373 inner_mode,
15374 mem_mode, initialized);
15375 break;
15376 }
15377 if (dwarf_strict && dwarf_version < 5)
15378 break;
15379 if (is_a <scalar_int_mode> (mode, &int_mode)
15380 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15381 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15382 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15383 {
15384 dw_die_ref type_die;
15385 dw_loc_descr_ref cvt;
15386
15387 mem_loc_result = mem_loc_descriptor (inner,
15388 GET_MODE (inner),
15389 mem_mode, initialized);
15390 if (mem_loc_result == NULL)
15391 break;
15392 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15393 if (type_die == NULL)
15394 {
15395 mem_loc_result = NULL;
15396 break;
15397 }
15398 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15399 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15400 else
15401 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15402 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15403 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15404 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15405 add_loc_descr (&mem_loc_result, cvt);
15406 if (is_a <scalar_int_mode> (mode, &int_mode)
15407 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15408 {
15409 /* Convert it to untyped afterwards. */
15410 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15411 add_loc_descr (&mem_loc_result, cvt);
15412 }
15413 }
15414 break;
15415
15416 case REG:
15417 if (!is_a <scalar_int_mode> (mode, &int_mode)
15418 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15419 && rtl != arg_pointer_rtx
15420 && rtl != frame_pointer_rtx
15421 #ifdef POINTERS_EXTEND_UNSIGNED
15422 && (int_mode != Pmode || mem_mode == VOIDmode)
15423 #endif
15424 ))
15425 {
15426 dw_die_ref type_die;
15427 unsigned int dbx_regnum;
15428
15429 if (dwarf_strict && dwarf_version < 5)
15430 break;
15431 if (REGNO (rtl) > FIRST_PSEUDO_REGISTER)
15432 break;
15433 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15434 if (type_die == NULL)
15435 break;
15436
15437 dbx_regnum = dbx_reg_number (rtl);
15438 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15439 break;
15440 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15441 dbx_regnum, 0);
15442 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15443 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15444 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15445 break;
15446 }
15447 /* Whenever a register number forms a part of the description of the
15448 method for calculating the (dynamic) address of a memory resident
15449 object, DWARF rules require the register number be referred to as
15450 a "base register". This distinction is not based in any way upon
15451 what category of register the hardware believes the given register
15452 belongs to. This is strictly DWARF terminology we're dealing with
15453 here. Note that in cases where the location of a memory-resident
15454 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15455 OP_CONST (0)) the actual DWARF location descriptor that we generate
15456 may just be OP_BASEREG (basereg). This may look deceptively like
15457 the object in question was allocated to a register (rather than in
15458 memory) so DWARF consumers need to be aware of the subtle
15459 distinction between OP_REG and OP_BASEREG. */
15460 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15461 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15462 else if (stack_realign_drap
15463 && crtl->drap_reg
15464 && crtl->args.internal_arg_pointer == rtl
15465 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15466 {
15467 /* If RTL is internal_arg_pointer, which has been optimized
15468 out, use DRAP instead. */
15469 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15470 VAR_INIT_STATUS_INITIALIZED);
15471 }
15472 break;
15473
15474 case SIGN_EXTEND:
15475 case ZERO_EXTEND:
15476 if (!is_a <scalar_int_mode> (mode, &int_mode)
15477 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15478 break;
15479 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15480 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15481 if (op0 == 0)
15482 break;
15483 else if (GET_CODE (rtl) == ZERO_EXTEND
15484 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15485 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15486 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15487 to expand zero extend as two shifts instead of
15488 masking. */
15489 && GET_MODE_SIZE (inner_mode) <= 4)
15490 {
15491 mem_loc_result = op0;
15492 add_loc_descr (&mem_loc_result,
15493 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15494 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15495 }
15496 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15497 {
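/* Shift the value up so that the inner mode's top bit becomes the
   address-sized sign bit, then shift back down: DW_OP_shra sign-extends,
   DW_OP_shr zero-extends. */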
15498 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15499 shift *= BITS_PER_UNIT;
15500 if (GET_CODE (rtl) == SIGN_EXTEND)
15501 op = DW_OP_shra;
15502 else
15503 op = DW_OP_shr;
15504 mem_loc_result = op0;
15505 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15506 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15507 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15508 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15509 }
15510 else if (!dwarf_strict || dwarf_version >= 5)
15511 {
15512 dw_die_ref type_die1, type_die2;
15513 dw_loc_descr_ref cvt;
15514
15515 type_die1 = base_type_for_mode (inner_mode,
15516 GET_CODE (rtl) == ZERO_EXTEND);
15517 if (type_die1 == NULL)
15518 break;
15519 type_die2 = base_type_for_mode (int_mode, 1);
15520 if (type_die2 == NULL)
15521 break;
15522 mem_loc_result = op0;
15523 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15524 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15525 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15526 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15527 add_loc_descr (&mem_loc_result, cvt);
15528 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15529 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15530 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15531 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15532 add_loc_descr (&mem_loc_result, cvt);
15533 }
15534 break;
15535
15536 case MEM:
15537 {
15538 rtx new_rtl = avoid_constant_pool_reference (rtl);
15539 if (new_rtl != rtl)
15540 {
15541 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15542 initialized);
15543 if (mem_loc_result != NULL)
15544 return mem_loc_result;
15545 }
15546 }
15547 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15548 get_address_mode (rtl), mode,
15549 VAR_INIT_STATUS_INITIALIZED);
15550 if (mem_loc_result == NULL)
15551 mem_loc_result = tls_mem_loc_descriptor (rtl);
15552 if (mem_loc_result != NULL)
15553 {
15554 if (!is_a <scalar_int_mode> (mode, &int_mode)
15555 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15556 {
15557 dw_die_ref type_die;
15558 dw_loc_descr_ref deref;
15559 HOST_WIDE_INT size;
15560
15561 if (dwarf_strict && dwarf_version < 5)
15562 return NULL;
15563 if (!GET_MODE_SIZE (mode).is_constant (&size))
15564 return NULL;
15565 type_die
15566 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15567 if (type_die == NULL)
15568 return NULL;
15569 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15570 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15571 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15572 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15573 add_loc_descr (&mem_loc_result, deref);
15574 }
15575 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15576 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15577 else
15578 add_loc_descr (&mem_loc_result,
15579 new_loc_descr (DW_OP_deref_size,
15580 GET_MODE_SIZE (int_mode), 0));
15581 }
15582 break;
15583
15584 case LO_SUM:
15585 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15586
15587 case LABEL_REF:
15588 /* Some ports can transform a symbol ref into a label ref, because
15589 the symbol ref is too far away and has to be dumped into a constant
15590 pool. */
15591 case CONST:
15592 case SYMBOL_REF:
15593 if (!is_a <scalar_int_mode> (mode, &int_mode)
15594 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15595 #ifdef POINTERS_EXTEND_UNSIGNED
15596 && (int_mode != Pmode || mem_mode == VOIDmode)
15597 #endif
15598 ))
15599 break;
15600 if (GET_CODE (rtl) == SYMBOL_REF
15601 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15602 {
15603 dw_loc_descr_ref temp;
15604
15605 /* If this is not defined, we have no way to emit the data. */
15606 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15607 break;
15608
15609 temp = new_addr_loc_descr (rtl, dtprel_true);
15610
15611 /* We check for DWARF 5 here because gdb did not implement
15612 DW_OP_form_tls_address until after 7.12. */
15613 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15614 ? DW_OP_form_tls_address
15615 : DW_OP_GNU_push_tls_address),
15616 0, 0);
15617 add_loc_descr (&mem_loc_result, temp);
15618
15619 break;
15620 }
15621
15622 if (!const_ok_for_output (rtl))
15623 {
15624 if (GET_CODE (rtl) == CONST)
15625 switch (GET_CODE (XEXP (rtl, 0)))
15626 {
15627 case NOT:
15628 op = DW_OP_not;
15629 goto try_const_unop;
15630 case NEG:
15631 op = DW_OP_neg;
15632 goto try_const_unop;
15633 try_const_unop:
15634 rtx arg;
15635 arg = XEXP (XEXP (rtl, 0), 0);
15636 if (!CONSTANT_P (arg))
15637 arg = gen_rtx_CONST (int_mode, arg);
15638 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15639 initialized);
15640 if (op0)
15641 {
15642 mem_loc_result = op0;
15643 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15644 }
15645 break;
15646 default:
15647 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15648 mem_mode, initialized);
15649 break;
15650 }
15651 break;
15652 }
15653
15654 symref:
15655 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15656 vec_safe_push (used_rtx_array, rtl);
15657 break;
15658
15659 case CONCAT:
15660 case CONCATN:
15661 case VAR_LOCATION:
15662 case DEBUG_IMPLICIT_PTR:
15663 expansion_failed (NULL_TREE, rtl,
15664 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15665 return 0;
15666
15667 case ENTRY_VALUE:
15668 if (dwarf_strict && dwarf_version < 5)
15669 return NULL;
15670 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15671 {
15672 if (!is_a <scalar_int_mode> (mode, &int_mode)
15673 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15674 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15675 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15676 else
15677 {
15678 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15679 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15680 return NULL;
15681 op0 = one_reg_loc_descriptor (dbx_regnum,
15682 VAR_INIT_STATUS_INITIALIZED);
15683 }
15684 }
15685 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15686 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15687 {
15688 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15689 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15690 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15691 return NULL;
15692 }
15693 else
15694 gcc_unreachable ();
15695 if (op0 == NULL)
15696 return NULL;
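      /* Wrap OP0 in DW_OP_entry_value (or its GNU equivalent): the consumer
	 evaluates the nested expression in the context of the call site, so
	 e.g. (a sketch) DW_OP_entry_value 1 DW_OP_reg5 recovers the value
	 register 5 held when the function was entered.  */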
15697 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15698 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15699 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15700 break;
15701
15702 case DEBUG_PARAMETER_REF:
15703 mem_loc_result = parameter_ref_descriptor (rtl);
15704 break;
15705
15706 case PRE_MODIFY:
15707 /* Extract the PLUS expression nested inside and fall into
15708 PLUS code below. */
15709 rtl = XEXP (rtl, 1);
15710 goto plus;
15711
15712 case PRE_INC:
15713 case PRE_DEC:
15714 /* Turn these into a PLUS expression and fall into the PLUS code
15715 below. */
15716 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15717 gen_int_mode (GET_CODE (rtl) == PRE_INC
15718 ? GET_MODE_UNIT_SIZE (mem_mode)
15719 : -GET_MODE_UNIT_SIZE (mem_mode),
15720 mode));
15721
15722 /* fall through */
15723
15724 case PLUS:
15725 plus:
15726 if (is_based_loc (rtl)
15727 && is_a <scalar_int_mode> (mode, &int_mode)
15728 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15729 || XEXP (rtl, 0) == arg_pointer_rtx
15730 || XEXP (rtl, 0) == frame_pointer_rtx))
15731 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15732 INTVAL (XEXP (rtl, 1)),
15733 VAR_INIT_STATUS_INITIALIZED);
15734 else
15735 {
15736 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15737 VAR_INIT_STATUS_INITIALIZED);
15738 if (mem_loc_result == 0)
15739 break;
15740
15741 if (CONST_INT_P (XEXP (rtl, 1))
15742 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15743 <= DWARF2_ADDR_SIZE))
15744 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15745 else
15746 {
15747 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15748 VAR_INIT_STATUS_INITIALIZED);
15749 if (op1 == 0)
15750 return NULL;
15751 add_loc_descr (&mem_loc_result, op1);
15752 add_loc_descr (&mem_loc_result,
15753 new_loc_descr (DW_OP_plus, 0, 0));
15754 }
15755 }
15756 break;
15757
15758 /* If a pseudo-reg is optimized away, it is possible for it to
15759 be replaced with a MEM containing a multiply or shift. */
15760 case MINUS:
15761 op = DW_OP_minus;
15762 goto do_binop;
15763
15764 case MULT:
15765 op = DW_OP_mul;
15766 goto do_binop;
15767
15768 case DIV:
15769 if ((!dwarf_strict || dwarf_version >= 5)
15770 && is_a <scalar_int_mode> (mode, &int_mode)
15771 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15772 {
15773 mem_loc_result = typed_binop (DW_OP_div, rtl,
15774 base_type_for_mode (mode, 0),
15775 int_mode, mem_mode);
15776 break;
15777 }
15778 op = DW_OP_div;
15779 goto do_binop;
15780
15781 case UMOD:
15782 op = DW_OP_mod;
15783 goto do_binop;
15784
15785 case ASHIFT:
15786 op = DW_OP_shl;
15787 goto do_shift;
15788
15789 case ASHIFTRT:
15790 op = DW_OP_shra;
15791 goto do_shift;
15792
15793 case LSHIFTRT:
15794 op = DW_OP_shr;
15795 goto do_shift;
15796
15797 do_shift:
15798 if (!is_a <scalar_int_mode> (mode, &int_mode))
15799 break;
15800 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15801 VAR_INIT_STATUS_INITIALIZED);
15802 {
15803 rtx rtlop1 = XEXP (rtl, 1);
15804 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15805 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15806 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15807 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15808 VAR_INIT_STATUS_INITIALIZED);
15809 }
15810
15811 if (op0 == 0 || op1 == 0)
15812 break;
15813
15814 mem_loc_result = op0;
15815 add_loc_descr (&mem_loc_result, op1);
15816 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15817 break;
15818
15819 case AND:
15820 op = DW_OP_and;
15821 goto do_binop;
15822
15823 case IOR:
15824 op = DW_OP_or;
15825 goto do_binop;
15826
15827 case XOR:
15828 op = DW_OP_xor;
15829 goto do_binop;
15830
15831 do_binop:
15832 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15833 VAR_INIT_STATUS_INITIALIZED);
15834 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15835 VAR_INIT_STATUS_INITIALIZED);
15836
15837 if (op0 == 0 || op1 == 0)
15838 break;
15839
15840 mem_loc_result = op0;
15841 add_loc_descr (&mem_loc_result, op1);
15842 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15843 break;
15844
15845 case MOD:
15846 if ((!dwarf_strict || dwarf_version >= 5)
15847 && is_a <scalar_int_mode> (mode, &int_mode)
15848 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15849 {
15850 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15851 base_type_for_mode (mode, 0),
15852 int_mode, mem_mode);
15853 break;
15854 }
15855
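      /* Otherwise MOD is lowered with plain stack arithmetic: once X and Y
	 are on the stack, the DW_OP_over DW_OP_over DW_OP_div DW_OP_mul
	 DW_OP_minus sequence emitted below leaves X - (X / Y) * Y, i.e. the
	 remainder (an illustrative note; it glosses over the signedness of
	 the DWARF operators).  */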
15856 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15857 VAR_INIT_STATUS_INITIALIZED);
15858 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15859 VAR_INIT_STATUS_INITIALIZED);
15860
15861 if (op0 == 0 || op1 == 0)
15862 break;
15863
15864 mem_loc_result = op0;
15865 add_loc_descr (&mem_loc_result, op1);
15866 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15867 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15868 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15869 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15870 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15871 break;
15872
15873 case UDIV:
15874 if ((!dwarf_strict || dwarf_version >= 5)
15875 && is_a <scalar_int_mode> (mode, &int_mode))
15876 {
15877 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15878 {
15879 op = DW_OP_div;
15880 goto do_binop;
15881 }
15882 mem_loc_result = typed_binop (DW_OP_div, rtl,
15883 base_type_for_mode (int_mode, 1),
15884 int_mode, mem_mode);
15885 }
15886 break;
15887
15888 case NOT:
15889 op = DW_OP_not;
15890 goto do_unop;
15891
15892 case ABS:
15893 op = DW_OP_abs;
15894 goto do_unop;
15895
15896 case NEG:
15897 op = DW_OP_neg;
15898 goto do_unop;
15899
15900 do_unop:
15901 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15902 VAR_INIT_STATUS_INITIALIZED);
15903
15904 if (op0 == 0)
15905 break;
15906
15907 mem_loc_result = op0;
15908 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15909 break;
15910
15911 case CONST_INT:
15912 if (!is_a <scalar_int_mode> (mode, &int_mode)
15913 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15914 #ifdef POINTERS_EXTEND_UNSIGNED
15915 || (int_mode == Pmode
15916 && mem_mode != VOIDmode
15917 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15918 #endif
15919 )
15920 {
15921 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15922 break;
15923 }
15924 if ((!dwarf_strict || dwarf_version >= 5)
15925 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
15926 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
15927 {
15928 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
15929 scalar_int_mode amode;
15930 if (type_die == NULL)
15931 return NULL;
15932 if (INTVAL (rtl) >= 0
15933 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
15934 .exists (&amode))
15935 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
15936 /* const DW_OP_convert <XXX> vs.
15937 DW_OP_const_type <XXX, 1, const>. */
15938 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
15939 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
15940 {
15941 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15942 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15943 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15944 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15945 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
15946 add_loc_descr (&mem_loc_result, op0);
15947 return mem_loc_result;
15948 }
15949 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
15950 INTVAL (rtl));
15951 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15952 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15953 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15954 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
15955 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
15956 else
15957 {
15958 mem_loc_result->dw_loc_oprnd2.val_class
15959 = dw_val_class_const_double;
15960 mem_loc_result->dw_loc_oprnd2.v.val_double
15961 = double_int::from_shwi (INTVAL (rtl));
15962 }
15963 }
15964 break;
15965
15966 case CONST_DOUBLE:
15967 if (!dwarf_strict || dwarf_version >= 5)
15968 {
15969 dw_die_ref type_die;
15970
15971 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
15972 CONST_DOUBLE rtx could represent either a large integer
15973 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
15974 the value is always a floating point constant.
15975
15976 When it is an integer, a CONST_DOUBLE is used whenever
15977 the constant requires 2 HWIs to be adequately represented.
15978 We output CONST_DOUBLEs as blocks. */
15979 if (mode == VOIDmode
15980 || (GET_MODE (rtl) == VOIDmode
15981 && maybe_ne (GET_MODE_BITSIZE (mode),
15982 HOST_BITS_PER_DOUBLE_INT)))
15983 break;
15984 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15985 if (type_die == NULL)
15986 return NULL;
15987 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
15988 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15989 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15990 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15991 #if TARGET_SUPPORTS_WIDE_INT == 0
15992 if (!SCALAR_FLOAT_MODE_P (mode))
15993 {
15994 mem_loc_result->dw_loc_oprnd2.val_class
15995 = dw_val_class_const_double;
15996 mem_loc_result->dw_loc_oprnd2.v.val_double
15997 = rtx_to_double_int (rtl);
15998 }
15999 else
16000 #endif
16001 {
16002 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16003 unsigned int length = GET_MODE_SIZE (float_mode);
16004 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16005
16006 insert_float (rtl, array);
16007 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16008 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16009 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16010 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16011 }
16012 }
16013 break;
16014
16015 case CONST_WIDE_INT:
16016 if (!dwarf_strict || dwarf_version >= 5)
16017 {
16018 dw_die_ref type_die;
16019
16020 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16021 if (type_die == NULL)
16022 return NULL;
16023 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16024 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16025 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16026 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16027 mem_loc_result->dw_loc_oprnd2.val_class
16028 = dw_val_class_wide_int;
16029 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16030 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16031 }
16032 break;
16033
16034 case CONST_POLY_INT:
16035 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16036 break;
16037
16038 case EQ:
16039 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16040 break;
16041
16042 case GE:
16043 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16044 break;
16045
16046 case GT:
16047 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16048 break;
16049
16050 case LE:
16051 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16052 break;
16053
16054 case LT:
16055 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16056 break;
16057
16058 case NE:
16059 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16060 break;
16061
16062 case GEU:
16063 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16064 break;
16065
16066 case GTU:
16067 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16068 break;
16069
16070 case LEU:
16071 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16072 break;
16073
16074 case LTU:
16075 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16076 break;
16077
16078 case UMIN:
16079 case UMAX:
16080 if (!SCALAR_INT_MODE_P (mode))
16081 break;
16082 /* FALLTHRU */
16083 case SMIN:
16084 case SMAX:
16085 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16086 break;
16087
16088 case ZERO_EXTRACT:
16089 case SIGN_EXTRACT:
16090 if (CONST_INT_P (XEXP (rtl, 1))
16091 && CONST_INT_P (XEXP (rtl, 2))
16092 && is_a <scalar_int_mode> (mode, &int_mode)
16093 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16094 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16095 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16096 && ((unsigned) INTVAL (XEXP (rtl, 1))
16097 + (unsigned) INTVAL (XEXP (rtl, 2))
16098 <= GET_MODE_BITSIZE (int_mode)))
16099 {
16100 int shift, size;
16101 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16102 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16103 if (op0 == 0)
16104 break;
16105 if (GET_CODE (rtl) == SIGN_EXTRACT)
16106 op = DW_OP_shra;
16107 else
16108 op = DW_OP_shr;
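	  /* The extraction is modelled by shifting the field left and then
	     shifting it back right: DW_OP_shra provides sign extension for
	     SIGN_EXTRACT and DW_OP_shr zero fill for ZERO_EXTRACT, with the
	     shift amounts derived from the field's size and position.  */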
16109 mem_loc_result = op0;
16110 size = INTVAL (XEXP (rtl, 1));
16111 shift = INTVAL (XEXP (rtl, 2));
16112 if (BITS_BIG_ENDIAN)
16113 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
16114 if (shift + size != (int) DWARF2_ADDR_SIZE)
16115 {
16116 add_loc_descr (&mem_loc_result,
16117 int_loc_descriptor (DWARF2_ADDR_SIZE
16118 - shift - size));
16119 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16120 }
16121 if (size != (int) DWARF2_ADDR_SIZE)
16122 {
16123 add_loc_descr (&mem_loc_result,
16124 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
16125 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16126 }
16127 }
16128 break;
16129
16130 case IF_THEN_ELSE:
16131 {
16132 dw_loc_descr_ref op2, bra_node, drop_node;
16133 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16134 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16135 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16136 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16137 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16138 VAR_INIT_STATUS_INITIALIZED);
16139 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16140 VAR_INIT_STATUS_INITIALIZED);
16141 if (op0 == NULL || op1 == NULL || op2 == NULL)
16142 break;
16143
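	/* A sketch of what is emitted below: <op1> <op2> <op0> DW_OP_bra L;
	   DW_OP_swap; L: DW_OP_drop.  When the condition OP0 is nonzero, the
	   branch skips the swap and the drop discards OP2, leaving OP1;
	   otherwise the swap brings OP1 to the top and the drop discards it,
	   leaving OP2.  */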
16144 mem_loc_result = op1;
16145 add_loc_descr (&mem_loc_result, op2);
16146 add_loc_descr (&mem_loc_result, op0);
16147 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16148 add_loc_descr (&mem_loc_result, bra_node);
16149 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16150 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16151 add_loc_descr (&mem_loc_result, drop_node);
16152 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16153 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16154 }
16155 break;
16156
16157 case FLOAT_EXTEND:
16158 case FLOAT_TRUNCATE:
16159 case FLOAT:
16160 case UNSIGNED_FLOAT:
16161 case FIX:
16162 case UNSIGNED_FIX:
16163 if (!dwarf_strict || dwarf_version >= 5)
16164 {
16165 dw_die_ref type_die;
16166 dw_loc_descr_ref cvt;
16167
16168 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16169 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16170 if (op0 == NULL)
16171 break;
16172 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16173 && (GET_CODE (rtl) == FLOAT
16174 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16175 {
16176 type_die = base_type_for_mode (int_mode,
16177 GET_CODE (rtl) == UNSIGNED_FLOAT);
16178 if (type_die == NULL)
16179 break;
16180 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16181 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16182 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16183 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16184 add_loc_descr (&op0, cvt);
16185 }
16186 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16187 if (type_die == NULL)
16188 break;
16189 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16190 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16191 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16192 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16193 add_loc_descr (&op0, cvt);
16194 if (is_a <scalar_int_mode> (mode, &int_mode)
16195 && (GET_CODE (rtl) == FIX
16196 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16197 {
16198 op0 = convert_descriptor_to_mode (int_mode, op0);
16199 if (op0 == NULL)
16200 break;
16201 }
16202 mem_loc_result = op0;
16203 }
16204 break;
16205
16206 case CLZ:
16207 case CTZ:
16208 case FFS:
16209 if (is_a <scalar_int_mode> (mode, &int_mode))
16210 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16211 break;
16212
16213 case POPCOUNT:
16214 case PARITY:
16215 if (is_a <scalar_int_mode> (mode, &int_mode))
16216 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16217 break;
16218
16219 case BSWAP:
16220 if (is_a <scalar_int_mode> (mode, &int_mode))
16221 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16222 break;
16223
16224 case ROTATE:
16225 case ROTATERT:
16226 if (is_a <scalar_int_mode> (mode, &int_mode))
16227 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16228 break;
16229
16230 case COMPARE:
16231 /* In theory, we could implement the above. */
16232 /* DWARF cannot represent the unsigned compare operations
16233 natively. */
16234 case SS_MULT:
16235 case US_MULT:
16236 case SS_DIV:
16237 case US_DIV:
16238 case SS_PLUS:
16239 case US_PLUS:
16240 case SS_MINUS:
16241 case US_MINUS:
16242 case SS_NEG:
16243 case US_NEG:
16244 case SS_ABS:
16245 case SS_ASHIFT:
16246 case US_ASHIFT:
16247 case SS_TRUNCATE:
16248 case US_TRUNCATE:
16249 case UNORDERED:
16250 case ORDERED:
16251 case UNEQ:
16252 case UNGE:
16253 case UNGT:
16254 case UNLE:
16255 case UNLT:
16256 case LTGT:
16257 case FRACT_CONVERT:
16258 case UNSIGNED_FRACT_CONVERT:
16259 case SAT_FRACT:
16260 case UNSIGNED_SAT_FRACT:
16261 case SQRT:
16262 case ASM_OPERANDS:
16263 case VEC_MERGE:
16264 case VEC_SELECT:
16265 case VEC_CONCAT:
16266 case VEC_DUPLICATE:
16267 case VEC_SERIES:
16268 case UNSPEC:
16269 case HIGH:
16270 case FMA:
16271 case STRICT_LOW_PART:
16272 case CONST_VECTOR:
16273 case CONST_FIXED:
16274 case CLRSB:
16275 case CLOBBER:
16276 /* If delegitimize_address couldn't do anything with the UNSPEC, we
16277 can't express it in the debug info. This can happen e.g. with some
16278 TLS UNSPECs. */
16279 break;
16280
16281 case CONST_STRING:
16282 resolve_one_addr (&rtl);
16283 goto symref;
16284
16285 /* RTL sequences inside PARALLEL record a series of DWARF operations for
16286 the expression. An UNSPEC rtx represents a raw DWARF operation;
16287 new_loc_descr is called for it to build the operation directly.
16288 Otherwise mem_loc_descriptor is called recursively. */
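    /* As a hypothetical example, a PARALLEL element such as
       (unspec [(const_int 16) (const_int 0)] DW_OP_plus_uconst) would be
       turned directly into DW_OP_plus_uconst 16, with the unused second
       operand passed as const0_rtx.  */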
16289 case PARALLEL:
16290 {
16291 int index = 0;
16292 dw_loc_descr_ref exp_result = NULL;
16293
16294 for (; index < XVECLEN (rtl, 0); index++)
16295 {
16296 rtx elem = XVECEXP (rtl, 0, index);
16297 if (GET_CODE (elem) == UNSPEC)
16298 {
16299 /* Each DWARF operation UNSPEC contains two operands; if
16300 an operand is not used for the operation, const0_rtx is
16301 passed. */
16302 gcc_assert (XVECLEN (elem, 0) == 2);
16303
16304 HOST_WIDE_INT dw_op = XINT (elem, 1);
16305 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16306 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16307 exp_result
16308 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16309 oprnd2);
16310 }
16311 else
16312 exp_result
16313 = mem_loc_descriptor (elem, mode, mem_mode,
16314 VAR_INIT_STATUS_INITIALIZED);
16315
16316 if (!mem_loc_result)
16317 mem_loc_result = exp_result;
16318 else
16319 add_loc_descr (&mem_loc_result, exp_result);
16320 }
16321
16322 break;
16323 }
16324
16325 default:
16326 if (flag_checking)
16327 {
16328 print_rtl (stderr, rtl);
16329 gcc_unreachable ();
16330 }
16331 break;
16332 }
16333
16334 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16335 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16336
16337 return mem_loc_result;
16338 }
16339
16340 /* Return a descriptor that describes the concatenation of two locations.
16341 This is typically a complex variable. */
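/* For instance (sizes and registers are illustrative): a complex value whose
   real and imaginary parts live in two 8-byte registers is described roughly
   as DW_OP_regX DW_OP_piece 8 DW_OP_regY DW_OP_piece 8.  */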
16342
16343 static dw_loc_descr_ref
16344 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16345 {
16346 /* At present we only track constant-sized pieces. */
16347 unsigned int size0, size1;
16348 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16349 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16350 return 0;
16351
16352 dw_loc_descr_ref cc_loc_result = NULL;
16353 dw_loc_descr_ref x0_ref
16354 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16355 dw_loc_descr_ref x1_ref
16356 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16357
16358 if (x0_ref == 0 || x1_ref == 0)
16359 return 0;
16360
16361 cc_loc_result = x0_ref;
16362 add_loc_descr_op_piece (&cc_loc_result, size0);
16363
16364 add_loc_descr (&cc_loc_result, x1_ref);
16365 add_loc_descr_op_piece (&cc_loc_result, size1);
16366
16367 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16368 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16369
16370 return cc_loc_result;
16371 }
16372
16373 /* Return a descriptor that describes the concatenation of N
16374 locations. */
16375
16376 static dw_loc_descr_ref
16377 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16378 {
16379 unsigned int i;
16380 dw_loc_descr_ref cc_loc_result = NULL;
16381 unsigned int n = XVECLEN (concatn, 0);
16382 unsigned int size;
16383
16384 for (i = 0; i < n; ++i)
16385 {
16386 dw_loc_descr_ref ref;
16387 rtx x = XVECEXP (concatn, 0, i);
16388
16389 /* At present we only track constant-sized pieces. */
16390 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16391 return NULL;
16392
16393 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16394 if (ref == NULL)
16395 return NULL;
16396
16397 add_loc_descr (&cc_loc_result, ref);
16398 add_loc_descr_op_piece (&cc_loc_result, size);
16399 }
16400
16401 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16402 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16403
16404 return cc_loc_result;
16405 }
16406
16407 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16408 for DEBUG_IMPLICIT_PTR RTL. */
16409
16410 static dw_loc_descr_ref
16411 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16412 {
16413 dw_loc_descr_ref ret;
16414 dw_die_ref ref;
16415
16416 if (dwarf_strict && dwarf_version < 5)
16417 return NULL;
16418 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16419 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16420 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16421 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16422 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16423 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16424 if (ref)
16425 {
16426 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16427 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16428 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16429 }
16430 else
16431 {
16432 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16433 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16434 }
16435 return ret;
16436 }
16437
16438 /* Output a proper Dwarf location descriptor for a variable or parameter
16439 which is either allocated in a register or in a memory location. For a
16440 register, we just generate an OP_REG and the register number. For a
16441 memory location we provide a Dwarf postfix expression describing how to
16442 generate the (dynamic) address of the object onto the address stack.
16443
16444 MODE is mode of the decl if this loc_descriptor is going to be used in
16445 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16446 allowed, VOIDmode otherwise.
16447
16448 If we don't know how to describe it, return 0. */
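/* For instance (an illustrative sketch): a variable held in a register yields
   a single DW_OP_regX, whereas one living in a frame slot yields an address
   computation such as DW_OP_fbreg -16.  */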
16449
16450 static dw_loc_descr_ref
16451 loc_descriptor (rtx rtl, machine_mode mode,
16452 enum var_init_status initialized)
16453 {
16454 dw_loc_descr_ref loc_result = NULL;
16455 scalar_int_mode int_mode;
16456
16457 switch (GET_CODE (rtl))
16458 {
16459 case SUBREG:
16460 /* The case of a subreg may arise when we have a local (register)
16461 variable or a formal (register) parameter which doesn't quite fill
16462 up an entire register. For now, just assume that it is
16463 legitimate to make the Dwarf info refer to the whole register which
16464 contains the given subreg. */
16465 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16466 loc_result = loc_descriptor (SUBREG_REG (rtl),
16467 GET_MODE (SUBREG_REG (rtl)), initialized);
16468 else
16469 goto do_default;
16470 break;
16471
16472 case REG:
16473 loc_result = reg_loc_descriptor (rtl, initialized);
16474 break;
16475
16476 case MEM:
16477 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16478 GET_MODE (rtl), initialized);
16479 if (loc_result == NULL)
16480 loc_result = tls_mem_loc_descriptor (rtl);
16481 if (loc_result == NULL)
16482 {
16483 rtx new_rtl = avoid_constant_pool_reference (rtl);
16484 if (new_rtl != rtl)
16485 loc_result = loc_descriptor (new_rtl, mode, initialized);
16486 }
16487 break;
16488
16489 case CONCAT:
16490 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16491 initialized);
16492 break;
16493
16494 case CONCATN:
16495 loc_result = concatn_loc_descriptor (rtl, initialized);
16496 break;
16497
16498 case VAR_LOCATION:
16499 /* Single part. */
16500 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16501 {
16502 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16503 if (GET_CODE (loc) == EXPR_LIST)
16504 loc = XEXP (loc, 0);
16505 loc_result = loc_descriptor (loc, mode, initialized);
16506 break;
16507 }
16508
16509 rtl = XEXP (rtl, 1);
16510 /* FALLTHRU */
16511
16512 case PARALLEL:
16513 {
16514 rtvec par_elems = XVEC (rtl, 0);
16515 int num_elem = GET_NUM_ELEM (par_elems);
16516 machine_mode mode;
16517 int i, size;
16518
16519 /* Create the first one, so we have something to add to. */
16520 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16521 VOIDmode, initialized);
16522 if (loc_result == NULL)
16523 return NULL;
16524 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16525 /* At present we only track constant-sized pieces. */
16526 if (!GET_MODE_SIZE (mode).is_constant (&size))
16527 return NULL;
16528 add_loc_descr_op_piece (&loc_result, size);
16529 for (i = 1; i < num_elem; i++)
16530 {
16531 dw_loc_descr_ref temp;
16532
16533 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16534 VOIDmode, initialized);
16535 if (temp == NULL)
16536 return NULL;
16537 add_loc_descr (&loc_result, temp);
16538 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16539 /* At present we only track constant-sized pieces. */
16540 if (!GET_MODE_SIZE (mode).is_constant (&size))
16541 return NULL;
16542 add_loc_descr_op_piece (&loc_result, size);
16543 }
16544 }
16545 break;
16546
16547 case CONST_INT:
16548 if (mode != VOIDmode && mode != BLKmode)
16549 {
16550 int_mode = as_a <scalar_int_mode> (mode);
16551 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16552 INTVAL (rtl));
16553 }
16554 break;
16555
16556 case CONST_DOUBLE:
16557 if (mode == VOIDmode)
16558 mode = GET_MODE (rtl);
16559
16560 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16561 {
16562 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16563
16564 /* Note that a CONST_DOUBLE rtx could represent either an integer
16565 or a floating-point constant. A CONST_DOUBLE is used whenever
16566 the constant requires more than one word in order to be
16567 adequately represented. We output CONST_DOUBLEs as blocks. */
16568 scalar_mode smode = as_a <scalar_mode> (mode);
16569 loc_result = new_loc_descr (DW_OP_implicit_value,
16570 GET_MODE_SIZE (smode), 0);
16571 #if TARGET_SUPPORTS_WIDE_INT == 0
16572 if (!SCALAR_FLOAT_MODE_P (smode))
16573 {
16574 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16575 loc_result->dw_loc_oprnd2.v.val_double
16576 = rtx_to_double_int (rtl);
16577 }
16578 else
16579 #endif
16580 {
16581 unsigned int length = GET_MODE_SIZE (smode);
16582 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16583
16584 insert_float (rtl, array);
16585 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16586 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16587 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16588 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16589 }
16590 }
16591 break;
16592
16593 case CONST_WIDE_INT:
16594 if (mode == VOIDmode)
16595 mode = GET_MODE (rtl);
16596
16597 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16598 {
16599 int_mode = as_a <scalar_int_mode> (mode);
16600 loc_result = new_loc_descr (DW_OP_implicit_value,
16601 GET_MODE_SIZE (int_mode), 0);
16602 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16603 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16604 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16605 }
16606 break;
16607
16608 case CONST_VECTOR:
16609 if (mode == VOIDmode)
16610 mode = GET_MODE (rtl);
16611
16612 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16613 {
16614 unsigned int length;
16615 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16616 return NULL;
16617
16618 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16619 unsigned char *array
16620 = ggc_vec_alloc<unsigned char> (length * elt_size);
16621 unsigned int i;
16622 unsigned char *p;
16623 machine_mode imode = GET_MODE_INNER (mode);
16624
16625 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16626 switch (GET_MODE_CLASS (mode))
16627 {
16628 case MODE_VECTOR_INT:
16629 for (i = 0, p = array; i < length; i++, p += elt_size)
16630 {
16631 rtx elt = CONST_VECTOR_ELT (rtl, i);
16632 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16633 }
16634 break;
16635
16636 case MODE_VECTOR_FLOAT:
16637 for (i = 0, p = array; i < length; i++, p += elt_size)
16638 {
16639 rtx elt = CONST_VECTOR_ELT (rtl, i);
16640 insert_float (elt, p);
16641 }
16642 break;
16643
16644 default:
16645 gcc_unreachable ();
16646 }
16647
16648 loc_result = new_loc_descr (DW_OP_implicit_value,
16649 length * elt_size, 0);
16650 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16651 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16652 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16653 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16654 }
16655 break;
16656
16657 case CONST:
16658 if (mode == VOIDmode
16659 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16660 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16661 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16662 {
16663 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16664 break;
16665 }
16666 /* FALLTHROUGH */
16667 case SYMBOL_REF:
16668 if (!const_ok_for_output (rtl))
16669 break;
16670 /* FALLTHROUGH */
16671 case LABEL_REF:
16672 if (is_a <scalar_int_mode> (mode, &int_mode)
16673 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16674 && (dwarf_version >= 4 || !dwarf_strict))
16675 {
16676 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16677 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16678 vec_safe_push (used_rtx_array, rtl);
16679 }
16680 break;
16681
16682 case DEBUG_IMPLICIT_PTR:
16683 loc_result = implicit_ptr_descriptor (rtl, 0);
16684 break;
16685
16686 case PLUS:
16687 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16688 && CONST_INT_P (XEXP (rtl, 1)))
16689 {
16690 loc_result
16691 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16692 break;
16693 }
16694 /* FALLTHRU */
16695 do_default:
16696 default:
16697 if ((is_a <scalar_int_mode> (mode, &int_mode)
16698 && GET_MODE (rtl) == int_mode
16699 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16700 && dwarf_version >= 4)
16701 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16702 {
16703 /* Value expression. */
16704 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16705 if (loc_result)
16706 add_loc_descr (&loc_result,
16707 new_loc_descr (DW_OP_stack_value, 0, 0));
16708 }
16709 break;
16710 }
16711
16712 return loc_result;
16713 }
16714
16715 /* We need to figure out what section we should use as the base for the
16716 address ranges where a given location is valid.
16717 1. If this particular DECL has a section associated with it, use that.
16718 2. If this function has a section associated with it, use that.
16719 3. Otherwise, use the text section.
16720 XXX: If you split a variable across multiple sections, we won't notice. */
16721
16722 static const char *
16723 secname_for_decl (const_tree decl)
16724 {
16725 const char *secname;
16726
16727 if (VAR_OR_FUNCTION_DECL_P (decl)
16728 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16729 && DECL_SECTION_NAME (decl))
16730 secname = DECL_SECTION_NAME (decl);
16731 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16732 secname = DECL_SECTION_NAME (current_function_decl);
16733 else if (cfun && in_cold_section_p)
16734 secname = crtl->subsections.cold_section_label;
16735 else
16736 secname = text_section_label;
16737
16738 return secname;
16739 }
16740
16741 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16742
16743 static bool
16744 decl_by_reference_p (tree decl)
16745 {
16746 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16747 || VAR_P (decl))
16748 && DECL_BY_REFERENCE (decl));
16749 }
16750
16751 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16752 for VARLOC. */
16753
16754 static dw_loc_descr_ref
16755 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16756 enum var_init_status initialized)
16757 {
16758 int have_address = 0;
16759 dw_loc_descr_ref descr;
16760 machine_mode mode;
16761
16762 if (want_address != 2)
16763 {
16764 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16765 /* Single part. */
16766 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16767 {
16768 varloc = PAT_VAR_LOCATION_LOC (varloc);
16769 if (GET_CODE (varloc) == EXPR_LIST)
16770 varloc = XEXP (varloc, 0);
16771 mode = GET_MODE (varloc);
16772 if (MEM_P (varloc))
16773 {
16774 rtx addr = XEXP (varloc, 0);
16775 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16776 mode, initialized);
16777 if (descr)
16778 have_address = 1;
16779 else
16780 {
16781 rtx x = avoid_constant_pool_reference (varloc);
16782 if (x != varloc)
16783 descr = mem_loc_descriptor (x, mode, VOIDmode,
16784 initialized);
16785 }
16786 }
16787 else
16788 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16789 }
16790 else
16791 return 0;
16792 }
16793 else
16794 {
16795 if (GET_CODE (varloc) == VAR_LOCATION)
16796 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16797 else
16798 mode = DECL_MODE (loc);
16799 descr = loc_descriptor (varloc, mode, initialized);
16800 have_address = 1;
16801 }
16802
16803 if (!descr)
16804 return 0;
16805
16806 if (want_address == 2 && !have_address
16807 && (dwarf_version >= 4 || !dwarf_strict))
16808 {
16809 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16810 {
16811 expansion_failed (loc, NULL_RTX,
16812 "DWARF address size mismatch");
16813 return 0;
16814 }
16815 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16816 have_address = 1;
16817 }
16818 /* Show if we can't fill the request for an address. */
16819 if (want_address && !have_address)
16820 {
16821 expansion_failed (loc, NULL_RTX,
16822 "Want address and only have value");
16823 return 0;
16824 }
16825
16826 /* If we've got an address and don't want one, dereference. */
16827 if (!want_address && have_address)
16828 {
16829 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16830 enum dwarf_location_atom op;
16831
16832 if (size > DWARF2_ADDR_SIZE || size == -1)
16833 {
16834 expansion_failed (loc, NULL_RTX,
16835 "DWARF address size mismatch");
16836 return 0;
16837 }
16838 else if (size == DWARF2_ADDR_SIZE)
16839 op = DW_OP_deref;
16840 else
16841 op = DW_OP_deref_size;
16842
16843 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16844 }
16845
16846 return descr;
16847 }
16848
16849 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
16850 if it is not possible. */
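/* For example (assuming 8-bit units), a 16-bit piece with a zero bit offset
   becomes DW_OP_piece 2; any other size or offset needs DW_OP_bit_piece,
   which requires DWARF 3 or non-strict DWARF.  */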
16851
16852 static dw_loc_descr_ref
16853 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
16854 {
16855 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
16856 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
16857 else if (dwarf_version >= 3 || !dwarf_strict)
16858 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
16859 else
16860 return NULL;
16861 }
16862
16863 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16864 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
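/* For instance (a sketch): a variable split by SRA into a field that lives in
   a register and a field that was optimized out yields something like
   DW_OP_regX DW_OP_piece 4 DW_OP_piece 4, where a piece with no preceding
   expression marks bits that are unavailable.  */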
16865
16866 static dw_loc_descr_ref
16867 dw_sra_loc_expr (tree decl, rtx loc)
16868 {
16869 rtx p;
16870 unsigned HOST_WIDE_INT padsize = 0;
16871 dw_loc_descr_ref descr, *descr_tail;
16872 unsigned HOST_WIDE_INT decl_size;
16873 rtx varloc;
16874 enum var_init_status initialized;
16875
16876 if (DECL_SIZE (decl) == NULL
16877 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16878 return NULL;
16879
16880 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16881 descr = NULL;
16882 descr_tail = &descr;
16883
16884 for (p = loc; p; p = XEXP (p, 1))
16885 {
16886 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16887 rtx loc_note = *decl_piece_varloc_ptr (p);
16888 dw_loc_descr_ref cur_descr;
16889 dw_loc_descr_ref *tail, last = NULL;
16890 unsigned HOST_WIDE_INT opsize = 0;
16891
16892 if (loc_note == NULL_RTX
16893 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16894 {
16895 padsize += bitsize;
16896 continue;
16897 }
16898 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16899 varloc = NOTE_VAR_LOCATION (loc_note);
16900 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16901 if (cur_descr == NULL)
16902 {
16903 padsize += bitsize;
16904 continue;
16905 }
16906
16907 /* Check that cur_descr either doesn't use
16908 DW_OP_*piece operations, or their sum is equal
16909 to bitsize. Otherwise we can't embed it. */
16910 for (tail = &cur_descr; *tail != NULL;
16911 tail = &(*tail)->dw_loc_next)
16912 if ((*tail)->dw_loc_opc == DW_OP_piece)
16913 {
16914 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16915 * BITS_PER_UNIT;
16916 last = *tail;
16917 }
16918 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
16919 {
16920 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
16921 last = *tail;
16922 }
16923
16924 if (last != NULL && opsize != bitsize)
16925 {
16926 padsize += bitsize;
16927 /* Discard the current piece of the descriptor and release any
16928 addr_table entries it uses. */
16929 remove_loc_list_addr_table_entries (cur_descr);
16930 continue;
16931 }
16932
16933 /* If there is a hole, add a DW_OP_*piece after an empty DWARF
16934 expression, which means that those bits are optimized out. */
16935 if (padsize)
16936 {
16937 if (padsize > decl_size)
16938 {
16939 remove_loc_list_addr_table_entries (cur_descr);
16940 goto discard_descr;
16941 }
16942 decl_size -= padsize;
16943 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
16944 if (*descr_tail == NULL)
16945 {
16946 remove_loc_list_addr_table_entries (cur_descr);
16947 goto discard_descr;
16948 }
16949 descr_tail = &(*descr_tail)->dw_loc_next;
16950 padsize = 0;
16951 }
16952 *descr_tail = cur_descr;
16953 descr_tail = tail;
16954 if (bitsize > decl_size)
16955 goto discard_descr;
16956 decl_size -= bitsize;
16957 if (last == NULL)
16958 {
16959 HOST_WIDE_INT offset = 0;
16960 if (GET_CODE (varloc) == VAR_LOCATION
16961 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16962 {
16963 varloc = PAT_VAR_LOCATION_LOC (varloc);
16964 if (GET_CODE (varloc) == EXPR_LIST)
16965 varloc = XEXP (varloc, 0);
16966 }
16967 do
16968 {
16969 if (GET_CODE (varloc) == CONST
16970 || GET_CODE (varloc) == SIGN_EXTEND
16971 || GET_CODE (varloc) == ZERO_EXTEND)
16972 varloc = XEXP (varloc, 0);
16973 else if (GET_CODE (varloc) == SUBREG)
16974 varloc = SUBREG_REG (varloc);
16975 else
16976 break;
16977 }
16978 while (1);
16979 /* The DW_OP_bit_piece offset should be zero for register
16980 or implicit location descriptions and for empty location
16981 descriptions, but for memory addresses it needs a big-endian
16982 adjustment. */
16983 if (MEM_P (varloc))
16984 {
16985 unsigned HOST_WIDE_INT memsize;
16986 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
16987 goto discard_descr;
16988 memsize *= BITS_PER_UNIT;
16989 if (memsize != bitsize)
16990 {
16991 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
16992 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
16993 goto discard_descr;
16994 if (memsize < bitsize)
16995 goto discard_descr;
16996 if (BITS_BIG_ENDIAN)
16997 offset = memsize - bitsize;
16998 }
16999 }
17000
17001 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17002 if (*descr_tail == NULL)
17003 goto discard_descr;
17004 descr_tail = &(*descr_tail)->dw_loc_next;
17005 }
17006 }
17007
17008 /* If there were any non-empty expressions, add padding till the end of
17009 the decl. */
17010 if (descr != NULL && decl_size != 0)
17011 {
17012 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17013 if (*descr_tail == NULL)
17014 goto discard_descr;
17015 }
17016 return descr;
17017
17018 discard_descr:
17019 /* Discard the descriptor and release any addr_table entries it uses. */
17020 remove_loc_list_addr_table_entries (descr);
17021 return NULL;
17022 }
17023
17024 /* Return the dwarf representation of the location list LOC_LIST of
17025 DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree
17026 function. */
17027
17028 static dw_loc_list_ref
17029 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17030 {
17031 const char *endname, *secname;
17032 var_loc_view endview;
17033 rtx varloc;
17034 enum var_init_status initialized;
17035 struct var_loc_node *node;
17036 dw_loc_descr_ref descr;
17037 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17038 dw_loc_list_ref list = NULL;
17039 dw_loc_list_ref *listp = &list;
17040
17041 /* Now that we know what section we are using for a base,
17042 actually construct the list of locations.
17043 The first location information is what is passed to the
17044 function that creates the location list, and the remaining
17045 locations just get added on to that list.
17046 Note that we only know the start address for a location
17047 (i.e. where the location changes), so to build the range, we use
17048 the range [current location start, next location start].
17049 This means we have to special case the last node, and generate
17050 a range of [last location start, end of function label]. */
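  /* As an illustrative sketch, a variable that lives in a register from .LVL0
     to .LVL1 and in a frame slot from .LVL1 to the end of the function gives
     two entries: [.LVL0, .LVL1) DW_OP_regX and [.LVL1, func-end)
     DW_OP_fbreg <offset>.  */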
17051
17052 if (cfun && crtl->has_bb_partition)
17053 {
17054 bool save_in_cold_section_p = in_cold_section_p;
17055 in_cold_section_p = first_function_block_is_cold;
17056 if (loc_list->last_before_switch == NULL)
17057 in_cold_section_p = !in_cold_section_p;
17058 secname = secname_for_decl (decl);
17059 in_cold_section_p = save_in_cold_section_p;
17060 }
17061 else
17062 secname = secname_for_decl (decl);
17063
17064 for (node = loc_list->first; node; node = node->next)
17065 {
17066 bool range_across_switch = false;
17067 if (GET_CODE (node->loc) == EXPR_LIST
17068 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17069 {
17070 if (GET_CODE (node->loc) == EXPR_LIST)
17071 {
17072 descr = NULL;
17073 /* This requires DW_OP_{,bit_}piece, which is not usable
17074 inside DWARF expressions. */
17075 if (want_address == 2)
17076 descr = dw_sra_loc_expr (decl, node->loc);
17077 }
17078 else
17079 {
17080 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17081 varloc = NOTE_VAR_LOCATION (node->loc);
17082 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17083 }
17084 if (descr)
17085 {
17086 /* If section switch happens in between node->label
17087 and node->next->label (or end of function) and
17088 we can't emit it as a single entry list,
17089 emit two ranges, first one ending at the end
17090 of first partition and second one starting at the
17091 beginning of second partition. */
17092 if (node == loc_list->last_before_switch
17093 && (node != loc_list->first || loc_list->first->next)
17094 && current_function_decl)
17095 {
17096 endname = cfun->fde->dw_fde_end;
17097 endview = 0;
17098 range_across_switch = true;
17099 }
17100 /* The variable has a location between NODE->LABEL and
17101 NODE->NEXT->LABEL. */
17102 else if (node->next)
17103 endname = node->next->label, endview = node->next->view;
17104 /* If the variable has a location at the last label
17105 it keeps its location until the end of function. */
17106 else if (!current_function_decl)
17107 endname = text_end_label, endview = 0;
17108 else
17109 {
17110 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17111 current_function_funcdef_no);
17112 endname = ggc_strdup (label_id);
17113 endview = 0;
17114 }
17115
17116 *listp = new_loc_list (descr, node->label, node->view,
17117 endname, endview, secname);
17118 if (TREE_CODE (decl) == PARM_DECL
17119 && node == loc_list->first
17120 && NOTE_P (node->loc)
17121 && strcmp (node->label, endname) == 0)
17122 (*listp)->force = true;
17123 listp = &(*listp)->dw_loc_next;
17124 }
17125 }
17126
17127 if (cfun
17128 && crtl->has_bb_partition
17129 && node == loc_list->last_before_switch)
17130 {
17131 bool save_in_cold_section_p = in_cold_section_p;
17132 in_cold_section_p = !first_function_block_is_cold;
17133 secname = secname_for_decl (decl);
17134 in_cold_section_p = save_in_cold_section_p;
17135 }
17136
17137 if (range_across_switch)
17138 {
17139 if (GET_CODE (node->loc) == EXPR_LIST)
17140 descr = dw_sra_loc_expr (decl, node->loc);
17141 else
17142 {
17143 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17144 varloc = NOTE_VAR_LOCATION (node->loc);
17145 descr = dw_loc_list_1 (decl, varloc, want_address,
17146 initialized);
17147 }
17148 gcc_assert (descr);
17149 /* The variable has a location between NODE->LABEL and
17150 NODE->NEXT->LABEL. */
17151 if (node->next)
17152 endname = node->next->label, endview = node->next->view;
17153 else
17154 endname = cfun->fde->dw_fde_second_end, endview = 0;
17155 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17156 endname, endview, secname);
17157 listp = &(*listp)->dw_loc_next;
17158 }
17159 }
17160
17161 /* Try to avoid the overhead of a location list by emitting a location
17162 expression instead, but only if we didn't have more than one
17163 location entry in the first place. If some entries were not
17164 representable, we don't want to pretend that a single entry that was
17165 representable applies to the entire scope in which the variable is
17166 available. */
17167 if (list && loc_list->first->next)
17168 gen_llsym (list);
17169 else
17170 maybe_gen_llsym (list);
17171
17172 return list;
17173 }
17174
17175 /* Return true if the loc_list has only a single element and thus can be
17176 represented as a location description. */
17177
17178 static bool
17179 single_element_loc_list_p (dw_loc_list_ref list)
17180 {
17181 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17182 return !list->ll_symbol;
17183 }
17184
17185 /* Duplicate a single element of location list. */
17186
17187 static inline dw_loc_descr_ref
17188 copy_loc_descr (dw_loc_descr_ref ref)
17189 {
17190 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17191 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17192 return copy;
17193 }
17194
17195 /* To each location in list LIST append loc descr REF. */
17196
17197 static void
17198 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17199 {
17200 dw_loc_descr_ref copy;
17201 add_loc_descr (&list->expr, ref);
17202 list = list->dw_loc_next;
17203 while (list)
17204 {
17205 copy = copy_loc_descr (ref);
17206 add_loc_descr (&list->expr, copy);
17207 while (copy->dw_loc_next)
17208 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17209 list = list->dw_loc_next;
17210 }
17211 }
17212
17213 /* To each location in list LIST prepend loc descr REF. */
17214
17215 static void
17216 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17217 {
17218 dw_loc_descr_ref copy;
17219 dw_loc_descr_ref ref_end = list->expr;
17220 add_loc_descr (&ref, list->expr);
17221 list->expr = ref;
17222 list = list->dw_loc_next;
17223 while (list)
17224 {
17225 dw_loc_descr_ref end = list->expr;
17226 list->expr = copy = copy_loc_descr (ref);
17227 while (copy->dw_loc_next != ref_end)
17228 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17229 copy->dw_loc_next = end;
17230 list = list->dw_loc_next;
17231 }
17232 }
17233
17234 /* Given two lists RET and LIST,
17235 produce a location list that is the result of adding the expression in LIST
17236 to the expression in RET at each position in the program.
17237 Might be destructive on both RET and LIST.
17238
17239 TODO: We handle only the simple cases of RET or LIST having at most one
17240 element. The general case would involve sorting the lists in program order
17241 and merging them, which will need some additional work.
17242 Adding that will improve the quality of debug info, especially for SRA-ed
17243 structures. */
17244
17245 static void
17246 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17247 {
17248 if (!list)
17249 return;
17250 if (!*ret)
17251 {
17252 *ret = list;
17253 return;
17254 }
17255 if (!list->dw_loc_next)
17256 {
17257 add_loc_descr_to_each (*ret, list->expr);
17258 return;
17259 }
17260 if (!(*ret)->dw_loc_next)
17261 {
17262 prepend_loc_descr_to_each (list, (*ret)->expr);
17263 *ret = list;
17264 return;
17265 }
17266 expansion_failed (NULL_TREE, NULL_RTX,
17267 "Don't know how to merge two non-trivial"
17268 " location lists.\n");
17269 *ret = NULL;
17270 return;
17271 }
17272
17273 /* LOC is a constant expression. Try our luck: look it up in the constant
17274 pool and return a loc_descr for its address. */
17275
17276 static dw_loc_descr_ref
17277 cst_pool_loc_descr (tree loc)
17278 {
17279 /* Get an RTL for this, if something has been emitted. */
17280 rtx rtl = lookup_constant_def (loc);
17281
17282 if (!rtl || !MEM_P (rtl))
17283 {
17284 gcc_assert (!rtl);
17285 return 0;
17286 }
17287 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17288
17289 /* TODO: We might get more coverage if we were actually delaying expansion
17290 of all expressions until the end of compilation, when constant pools are
17291 fully populated. */
17292 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17293 {
17294 expansion_failed (loc, NULL_RTX,
17295 "CST value in constant pool but not marked.");
17296 return 0;
17297 }
17298 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17299 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17300 }
17301
17302 /* Return a dw_loc_list representing the address of the ADDR_EXPR LOC
17303 by looking for an inner INDIRECT_REF expression and turning
17304 it into simple arithmetic.
17305
17306 See loc_list_from_tree for the meaning of CONTEXT. */
17307
17308 static dw_loc_list_ref
17309 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17310 loc_descr_context *context)
17311 {
17312 tree obj, offset;
17313 poly_int64 bitsize, bitpos, bytepos;
17314 machine_mode mode;
17315 int unsignedp, reversep, volatilep = 0;
17316 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17317
17318 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17319 &bitsize, &bitpos, &offset, &mode,
17320 &unsignedp, &reversep, &volatilep);
17321 STRIP_NOPS (obj);
17322 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17323 {
17324 expansion_failed (loc, NULL_RTX, "bitfield access");
17325 return 0;
17326 }
17327 if (!INDIRECT_REF_P (obj))
17328 {
17329 expansion_failed (obj,
17330 NULL_RTX, "no indirect ref in inner reference");
17331 return 0;
17332 }
17333 if (!offset && known_eq (bitpos, 0))
17334 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17335 context);
17336 else if (toplev
17337 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17338 && (dwarf_version >= 4 || !dwarf_strict))
17339 {
17340 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17341 if (!list_ret)
17342 return 0;
17343 if (offset)
17344 {
17345 /* Variable offset. */
17346 list_ret1 = loc_list_from_tree (offset, 0, context);
17347 if (list_ret1 == 0)
17348 return 0;
17349 add_loc_list (&list_ret, list_ret1);
17350 if (!list_ret)
17351 return 0;
17352 add_loc_descr_to_each (list_ret,
17353 new_loc_descr (DW_OP_plus, 0, 0));
17354 }
17355 HOST_WIDE_INT value;
17356 if (bytepos.is_constant (&value) && value > 0)
17357 add_loc_descr_to_each (list_ret,
17358 new_loc_descr (DW_OP_plus_uconst, value, 0));
17359 else if (maybe_ne (bytepos, 0))
17360 loc_list_plus_const (list_ret, bytepos);
17361 add_loc_descr_to_each (list_ret,
17362 new_loc_descr (DW_OP_stack_value, 0, 0));
17363 }
17364 return list_ret;
17365 }
17366
17367 /* Set LOC to the next operation that is not a DW_OP_nop operation. If all
17368 operations from LOC onwards are nops, move to the last one. Insert into
17369 NOPS all operations that are skipped. */
17370
17371 static void
17372 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17373 hash_set<dw_loc_descr_ref> &nops)
17374 {
17375 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17376 {
17377 nops.add (loc);
17378 loc = loc->dw_loc_next;
17379 }
17380 }
17381
17382 /* Helper for loc_descr_without_nops: free the location description
17383 operation LOC. */
17384
17385 bool
17386 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17387 {
17388 ggc_free (loc);
17389 return true;
17390 }
17391
17392 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17393 finishes LOC. */
17394
17395 static void
17396 loc_descr_without_nops (dw_loc_descr_ref &loc)
17397 {
17398 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17399 return;
17400
17401 /* Set of all DW_OP_nop operations we remove. */
17402 hash_set<dw_loc_descr_ref> nops;
17403
17404 /* First, strip all prefix NOP operations in order to keep the head of the
17405 operations list. */
17406 loc_descr_to_next_no_nop (loc, nops);
17407
17408 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17409 {
17410 /* For control flow operations: strip "prefix" nops in destination
17411 labels. */
17412 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17413 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17414 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17415 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17416
17417 /* Do the same for the operations that follow, then move to the next
17418 iteration. */
17419 if (cur->dw_loc_next != NULL)
17420 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17421 cur = cur->dw_loc_next;
17422 }
17423
17424 nops.traverse<void *, free_loc_descr> (NULL);
17425 }
17426
17427
17428 struct dwarf_procedure_info;
17429
17430 /* Helper structure for location descriptions generation. */
17431 struct loc_descr_context
17432 {
17433 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17434 NULL_TREE if DW_OP_push_object_address is invalid for this location
17435 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17436 tree context_type;
17437 /* The ..._DECL node that should be translated as a
17438 DW_OP_push_object_address operation. */
17439 tree base_decl;
17440 /* Information about the DWARF procedure we are currently generating. NULL if
17441 we are not generating a DWARF procedure. */
17442 struct dwarf_procedure_info *dpi;
17443 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17444 by the consumer. Used for DW_TAG_generic_subrange attributes. */
17445 bool placeholder_arg;
17446 /* True if PLACEHOLDER_EXPR has been seen. */
17447 bool placeholder_seen;
17448 };
17449
17450 /* DWARF procedures generation
17451
17452 DWARF expressions (aka. location descriptions) are used to encode variable
17453 things such as sizes or offsets. Such computations can have redundant parts
17454 that can be factorized in order to reduce the size of the output debug
17455 information. This is the whole point of DWARF procedures.
17456
17457 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17458 already factorized into functions ("size functions") in order to handle very
17459 big and complex types. Such functions are quite simple: they have integral
17460 arguments, they return an integral result and their body contains only a
17461 return statement with arithmetic expressions. This is the only kind of
17462 function we are interested in translating into DWARF procedures, here.
17463
17464 DWARF expressions and DWARF procedures are executed using a stack, so we have
17465 to define some calling convention for them to interact. Let's say that:
17466
17467 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17468 all arguments in reverse order (right-to-left) so that when the DWARF
17469 procedure execution starts, the first argument is the top of the stack.
17470
17471 - Then, when returning, the DWARF procedure must have consumed all arguments
17472 on the stack, must have pushed the result and touched nothing else.
17473
17474 - Each integral argument and the result have integral types that can be held
17475 in a single stack slot.
17476
17477 - We call "frame offset" the number of stack slots that are "under DWARF
17478 procedure control": it includes the argument slots, the temporaries and
17479 the result slot. Thus, it is equal to the number of arguments when the
17480 procedure execution starts and must be equal to one (the result) when it
17481 returns. */
17482
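/* As an illustration, consider a hypothetical size function

     static long size_func (long n) { return n * 4; }

   A possible DWARF procedure body for it, following the convention above, is
   the expression "DW_OP_dup; DW_OP_lit4; DW_OP_mul; DW_OP_swap; DW_OP_drop":
   execution starts with a frame offset of 1 (the single argument), a copy of
   the argument is multiplied by 4, and the trailing DW_OP_swap/DW_OP_drop
   pair consumes the argument so that only the result is left, giving a frame
   offset of 1 on return, as required.  A caller then pushes the argument and
   emits a DW_OP_call4 that references the procedure DIE.  */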
17483 /* Helper structure used when generating operations for a DWARF procedure. */
17484 struct dwarf_procedure_info
17485 {
17486 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17487 currently translated. */
17488 tree fndecl;
17489 /* The number of arguments FNDECL takes. */
17490 unsigned args_count;
17491 };
17492
17493 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17494 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17495 equate it to this DIE. */
17496
17497 static dw_die_ref
17498 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17499 dw_die_ref parent_die)
17500 {
17501 dw_die_ref dwarf_proc_die;
17502
17503 if ((dwarf_version < 3 && dwarf_strict)
17504 || location == NULL)
17505 return NULL;
17506
17507 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17508 if (fndecl)
17509 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17510 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17511 return dwarf_proc_die;
17512 }
17513
17514 /* Return whether TYPE is a supported type as a DWARF procedure argument
17515 type or return type (we handle only scalar types and pointer types that
17516 aren't wider than the DWARF expression evaluation stack). */
17517
17518 static bool
17519 is_handled_procedure_type (tree type)
17520 {
17521 return ((INTEGRAL_TYPE_P (type)
17522 || TREE_CODE (type) == OFFSET_TYPE
17523 || TREE_CODE (type) == POINTER_TYPE)
17524 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17525 }
17526
17527 /* Helper for resolve_args_picking: do the same but stop when coming across
17528 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17529 offset *before* evaluating the corresponding operation. */
17530
17531 static bool
17532 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17533 struct dwarf_procedure_info *dpi,
17534 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17535 {
17536 /* The "frame_offset" identifier is already used to name a macro... */
17537 unsigned frame_offset_ = initial_frame_offset;
17538 dw_loc_descr_ref l;
17539
17540 for (l = loc; l != NULL;)
17541 {
17542 bool existed;
17543 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17544
17545 /* If we have already visited this node, there is nothing more to compute. */
17546 if (existed)
17547 {
17548 /* Make sure that the stack size is consistent wherever the execution
17549 flow comes from. */
17550 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17551 break;
17552 }
17553 l_frame_offset = frame_offset_;
17554
17555 /* If needed, relocate the picking offset with respect to the frame
17556 offset. */
17557 if (l->frame_offset_rel)
17558 {
17559 unsigned HOST_WIDE_INT off;
17560 switch (l->dw_loc_opc)
17561 {
17562 case DW_OP_pick:
17563 off = l->dw_loc_oprnd1.v.val_unsigned;
17564 break;
17565 case DW_OP_dup:
17566 off = 0;
17567 break;
17568 case DW_OP_over:
17569 off = 1;
17570 break;
17571 default:
17572 gcc_unreachable ();
17573 }
17574 /* frame_offset_ is the size of the current stack frame, including
17575 incoming arguments. Besides, the arguments are pushed
17576 right-to-left. Thus, in order to access the Nth argument from
17577 this operation node, the picking has to skip temporaries *plus*
17578 one stack slot per argument (0 for the first one, 1 for the second
17579 one, etc.).
17580
17581 The targeted argument number (N) is already set as the operand,
17582 and the number of temporaries can be computed with:
17583 frame_offset_ - dpi->args_count */
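/* For instance, with dpi->args_count == 2 and frame_offset_ == 3 (both
   arguments plus one temporary on top of the stack), reaching argument #0
   needs off = 0 + 3 - 2 = 1 (rewritten as DW_OP_over below), while
   argument #1 needs off = 1 + 3 - 2 = 2 (DW_OP_pick 2).  */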
17584 off += frame_offset_ - dpi->args_count;
17585
17586 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17587 if (off > 255)
17588 return false;
17589
17590 if (off == 0)
17591 {
17592 l->dw_loc_opc = DW_OP_dup;
17593 l->dw_loc_oprnd1.v.val_unsigned = 0;
17594 }
17595 else if (off == 1)
17596 {
17597 l->dw_loc_opc = DW_OP_over;
17598 l->dw_loc_oprnd1.v.val_unsigned = 0;
17599 }
17600 else
17601 {
17602 l->dw_loc_opc = DW_OP_pick;
17603 l->dw_loc_oprnd1.v.val_unsigned = off;
17604 }
17605 }
17606
17607 /* Update frame_offset according to the effect the current operation has
17608 on the stack. */
17609 switch (l->dw_loc_opc)
17610 {
17611 case DW_OP_deref:
17612 case DW_OP_swap:
17613 case DW_OP_rot:
17614 case DW_OP_abs:
17615 case DW_OP_neg:
17616 case DW_OP_not:
17617 case DW_OP_plus_uconst:
17618 case DW_OP_skip:
17619 case DW_OP_reg0:
17620 case DW_OP_reg1:
17621 case DW_OP_reg2:
17622 case DW_OP_reg3:
17623 case DW_OP_reg4:
17624 case DW_OP_reg5:
17625 case DW_OP_reg6:
17626 case DW_OP_reg7:
17627 case DW_OP_reg8:
17628 case DW_OP_reg9:
17629 case DW_OP_reg10:
17630 case DW_OP_reg11:
17631 case DW_OP_reg12:
17632 case DW_OP_reg13:
17633 case DW_OP_reg14:
17634 case DW_OP_reg15:
17635 case DW_OP_reg16:
17636 case DW_OP_reg17:
17637 case DW_OP_reg18:
17638 case DW_OP_reg19:
17639 case DW_OP_reg20:
17640 case DW_OP_reg21:
17641 case DW_OP_reg22:
17642 case DW_OP_reg23:
17643 case DW_OP_reg24:
17644 case DW_OP_reg25:
17645 case DW_OP_reg26:
17646 case DW_OP_reg27:
17647 case DW_OP_reg28:
17648 case DW_OP_reg29:
17649 case DW_OP_reg30:
17650 case DW_OP_reg31:
17651 case DW_OP_bregx:
17652 case DW_OP_piece:
17653 case DW_OP_deref_size:
17654 case DW_OP_nop:
17655 case DW_OP_bit_piece:
17656 case DW_OP_implicit_value:
17657 case DW_OP_stack_value:
17658 break;
17659
17660 case DW_OP_addr:
17661 case DW_OP_const1u:
17662 case DW_OP_const1s:
17663 case DW_OP_const2u:
17664 case DW_OP_const2s:
17665 case DW_OP_const4u:
17666 case DW_OP_const4s:
17667 case DW_OP_const8u:
17668 case DW_OP_const8s:
17669 case DW_OP_constu:
17670 case DW_OP_consts:
17671 case DW_OP_dup:
17672 case DW_OP_over:
17673 case DW_OP_pick:
17674 case DW_OP_lit0:
17675 case DW_OP_lit1:
17676 case DW_OP_lit2:
17677 case DW_OP_lit3:
17678 case DW_OP_lit4:
17679 case DW_OP_lit5:
17680 case DW_OP_lit6:
17681 case DW_OP_lit7:
17682 case DW_OP_lit8:
17683 case DW_OP_lit9:
17684 case DW_OP_lit10:
17685 case DW_OP_lit11:
17686 case DW_OP_lit12:
17687 case DW_OP_lit13:
17688 case DW_OP_lit14:
17689 case DW_OP_lit15:
17690 case DW_OP_lit16:
17691 case DW_OP_lit17:
17692 case DW_OP_lit18:
17693 case DW_OP_lit19:
17694 case DW_OP_lit20:
17695 case DW_OP_lit21:
17696 case DW_OP_lit22:
17697 case DW_OP_lit23:
17698 case DW_OP_lit24:
17699 case DW_OP_lit25:
17700 case DW_OP_lit26:
17701 case DW_OP_lit27:
17702 case DW_OP_lit28:
17703 case DW_OP_lit29:
17704 case DW_OP_lit30:
17705 case DW_OP_lit31:
17706 case DW_OP_breg0:
17707 case DW_OP_breg1:
17708 case DW_OP_breg2:
17709 case DW_OP_breg3:
17710 case DW_OP_breg4:
17711 case DW_OP_breg5:
17712 case DW_OP_breg6:
17713 case DW_OP_breg7:
17714 case DW_OP_breg8:
17715 case DW_OP_breg9:
17716 case DW_OP_breg10:
17717 case DW_OP_breg11:
17718 case DW_OP_breg12:
17719 case DW_OP_breg13:
17720 case DW_OP_breg14:
17721 case DW_OP_breg15:
17722 case DW_OP_breg16:
17723 case DW_OP_breg17:
17724 case DW_OP_breg18:
17725 case DW_OP_breg19:
17726 case DW_OP_breg20:
17727 case DW_OP_breg21:
17728 case DW_OP_breg22:
17729 case DW_OP_breg23:
17730 case DW_OP_breg24:
17731 case DW_OP_breg25:
17732 case DW_OP_breg26:
17733 case DW_OP_breg27:
17734 case DW_OP_breg28:
17735 case DW_OP_breg29:
17736 case DW_OP_breg30:
17737 case DW_OP_breg31:
17738 case DW_OP_fbreg:
17739 case DW_OP_push_object_address:
17740 case DW_OP_call_frame_cfa:
17741 case DW_OP_GNU_variable_value:
17742 ++frame_offset_;
17743 break;
17744
17745 case DW_OP_drop:
17746 case DW_OP_xderef:
17747 case DW_OP_and:
17748 case DW_OP_div:
17749 case DW_OP_minus:
17750 case DW_OP_mod:
17751 case DW_OP_mul:
17752 case DW_OP_or:
17753 case DW_OP_plus:
17754 case DW_OP_shl:
17755 case DW_OP_shr:
17756 case DW_OP_shra:
17757 case DW_OP_xor:
17758 case DW_OP_bra:
17759 case DW_OP_eq:
17760 case DW_OP_ge:
17761 case DW_OP_gt:
17762 case DW_OP_le:
17763 case DW_OP_lt:
17764 case DW_OP_ne:
17765 case DW_OP_regx:
17766 case DW_OP_xderef_size:
17767 --frame_offset_;
17768 break;
17769
17770 case DW_OP_call2:
17771 case DW_OP_call4:
17772 case DW_OP_call_ref:
17773 {
17774 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17775 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17776
17777 if (stack_usage == NULL)
17778 return false;
17779 frame_offset_ += *stack_usage;
17780 break;
17781 }
17782
17783 case DW_OP_implicit_pointer:
17784 case DW_OP_entry_value:
17785 case DW_OP_const_type:
17786 case DW_OP_regval_type:
17787 case DW_OP_deref_type:
17788 case DW_OP_convert:
17789 case DW_OP_reinterpret:
17790 case DW_OP_form_tls_address:
17791 case DW_OP_GNU_push_tls_address:
17792 case DW_OP_GNU_uninit:
17793 case DW_OP_GNU_encoded_addr:
17794 case DW_OP_GNU_implicit_pointer:
17795 case DW_OP_GNU_entry_value:
17796 case DW_OP_GNU_const_type:
17797 case DW_OP_GNU_regval_type:
17798 case DW_OP_GNU_deref_type:
17799 case DW_OP_GNU_convert:
17800 case DW_OP_GNU_reinterpret:
17801 case DW_OP_GNU_parameter_ref:
17802 /* loc_list_from_tree will probably not output these operations for
17803 size functions, so assume they will not appear here. */
17804 /* Fall through... */
17805
17806 default:
17807 gcc_unreachable ();
17808 }
17809
17810 /* Now, follow the control flow (except subroutine calls). */
17811 switch (l->dw_loc_opc)
17812 {
17813 case DW_OP_bra:
17814 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17815 frame_offsets))
17816 return false;
17817 /* Fall through. */
17818
17819 case DW_OP_skip:
17820 l = l->dw_loc_oprnd1.v.val_loc;
17821 break;
17822
17823 case DW_OP_stack_value:
17824 return true;
17825
17826 default:
17827 l = l->dw_loc_next;
17828 break;
17829 }
17830 }
17831
17832 return true;
17833 }
17834
17835 /* Perform a DFS over the operations reachable through LOC (i.e. follow branch
17836 operations) in order to resolve the operand of DW_OP_pick operations that
17837 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
17838 offset *before* LOC is executed. Return whether all relocations were
17839 successful. */
17840
17841 static bool
17842 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17843 struct dwarf_procedure_info *dpi)
17844 {
17845 /* Associate to all visited operations the frame offset *before* evaluating
17846 this operation. */
17847 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
17848
17849 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
17850 frame_offsets);
17851 }
17852
17853 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
17854 Return NULL if it is not possible. */
17855
17856 static dw_die_ref
17857 function_to_dwarf_procedure (tree fndecl)
17858 {
17859 struct loc_descr_context ctx;
17860 struct dwarf_procedure_info dpi;
17861 dw_die_ref dwarf_proc_die;
17862 tree tree_body = DECL_SAVED_TREE (fndecl);
17863 dw_loc_descr_ref loc_body, epilogue;
17864
17865 tree cursor;
17866 unsigned i;
17867
17868 /* Do not generate multiple DWARF procedures for the same function
17869 declaration. */
17870 dwarf_proc_die = lookup_decl_die (fndecl);
17871 if (dwarf_proc_die != NULL)
17872 return dwarf_proc_die;
17873
17874 /* DWARF procedures are available starting with the DWARFv3 standard. */
17875 if (dwarf_version < 3 && dwarf_strict)
17876 return NULL;
17877
17878 /* We handle only functions for which we still have a body, that return a
17879 supported type and that take arguments with supported types. Note that
17880 there is no point translating functions that return nothing. */
17881 if (tree_body == NULL_TREE
17882 || DECL_RESULT (fndecl) == NULL_TREE
17883 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
17884 return NULL;
17885
17886 for (cursor = DECL_ARGUMENTS (fndecl);
17887 cursor != NULL_TREE;
17888 cursor = TREE_CHAIN (cursor))
17889 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
17890 return NULL;
17891
17892 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
17893 if (TREE_CODE (tree_body) != RETURN_EXPR)
17894 return NULL;
17895 tree_body = TREE_OPERAND (tree_body, 0);
17896 if (TREE_CODE (tree_body) != MODIFY_EXPR
17897 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
17898 return NULL;
17899 tree_body = TREE_OPERAND (tree_body, 1);
17900
17901 /* Try to translate the body expression itself. Note that this will probably
17902 cause an infinite recursion if its call graph has a cycle. This is very
17903 unlikely for size functions, however, so don't bother with such things at
17904 the moment. */
17905 ctx.context_type = NULL_TREE;
17906 ctx.base_decl = NULL_TREE;
17907 ctx.dpi = &dpi;
17908 ctx.placeholder_arg = false;
17909 ctx.placeholder_seen = false;
17910 dpi.fndecl = fndecl;
17911 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
17912 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
17913 if (!loc_body)
17914 return NULL;
17915
17916 /* After evaluating all operands in "loc_body", we should still have on the
17917 stack all arguments plus the desired function result (top of the stack).
17918 Generate code in order to keep only the result in our stack frame. */
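/* For instance, with two arguments the epilogue built below is
   "DW_OP_swap; DW_OP_drop; DW_OP_swap; DW_OP_drop": each swap/drop pair
   discards the stack slot sitting just below the result.  */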
17919 epilogue = NULL;
17920 for (i = 0; i < dpi.args_count; ++i)
17921 {
17922 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
17923 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
17924 op_couple->dw_loc_next->dw_loc_next = epilogue;
17925 epilogue = op_couple;
17926 }
17927 add_loc_descr (&loc_body, epilogue);
17928 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
17929 return NULL;
17930
17931 /* Trailing nops from loc_descriptor_from_tree (if any) could not be removed
17932 earlier because they were still considered useful. Now that there is an
17933 epilogue, they no longer are, so give removal another try. */
17934 loc_descr_without_nops (loc_body);
17935
17936 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
17937 a DW_TAG_dwarf_procedure, so we may have a conflict here. It's unlikely,
17938 though, given that size functions do not come from source, so they should
17939 not have a dedicated DW_TAG_subprogram DIE. */
17940 dwarf_proc_die
17941 = new_dwarf_proc_die (loc_body, fndecl,
17942 get_context_die (DECL_CONTEXT (fndecl)));
17943
17944 /* The called DWARF procedure consumes one stack slot per argument and
17945 returns one stack slot. */
17946 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
17947
17948 return dwarf_proc_die;
17949 }
17950
17951
17952 /* Generate a Dwarf location list representing LOC.
17953 If WANT_ADDRESS is 0, an expression computing the value of LOC is returned.
17954 If WANT_ADDRESS is 1, an expression computing the address of LOC is returned.
17955 If WANT_ADDRESS is 2, an expression computing an address usable in a
17956 location description is returned (i.e. DW_OP_reg can be used
17957 to refer to register values).
17958
17959 CONTEXT provides information to customize the location descriptions
17960 generation. Its context_type field specifies what type is implicitly
17961 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
17962 will not be generated.
17963
17964 Its DPI field determines whether we are generating a DWARF expression for a
17965 DWARF procedure, so PARM_DECL references are processed specifically.
17966
17967 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
17968 and dpi fields were null. */
17969
17970 static dw_loc_list_ref
17971 loc_list_from_tree_1 (tree loc, int want_address,
17972 struct loc_descr_context *context)
17973 {
17974 dw_loc_descr_ref ret = NULL, ret1 = NULL;
17975 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17976 int have_address = 0;
17977 enum dwarf_location_atom op;
17978
17979 /* ??? Most of the time we do not take proper care for sign/zero
17980 extending the values properly. Hopefully this won't be a real
17981 problem... */
17982
17983 if (context != NULL
17984 && context->base_decl == loc
17985 && want_address == 0)
17986 {
17987 if (dwarf_version >= 3 || !dwarf_strict)
17988 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
17989 NULL, 0, NULL, 0, NULL);
17990 else
17991 return NULL;
17992 }
17993
17994 switch (TREE_CODE (loc))
17995 {
17996 case ERROR_MARK:
17997 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
17998 return 0;
17999
18000 case PLACEHOLDER_EXPR:
18001 /* This case involves extracting fields from an object to determine the
18002 position of other fields. It is supposed to appear only as the first
18003 operand of COMPONENT_REF nodes and to reference precisely the type
18004 that the context allows. */
18005 if (context != NULL
18006 && TREE_TYPE (loc) == context->context_type
18007 && want_address >= 1)
18008 {
18009 if (dwarf_version >= 3 || !dwarf_strict)
18010 {
18011 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18012 have_address = 1;
18013 break;
18014 }
18015 else
18016 return NULL;
18017 }
18018 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18019 the single argument passed by the consumer. */
18020 else if (context != NULL
18021 && context->placeholder_arg
18022 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18023 && want_address == 0)
18024 {
18025 ret = new_loc_descr (DW_OP_pick, 0, 0);
18026 ret->frame_offset_rel = 1;
18027 context->placeholder_seen = true;
18028 break;
18029 }
18030 else
18031 expansion_failed (loc, NULL_RTX,
18032 "PLACEHOLDER_EXPR for an unexpected type");
18033 break;
18034
18035 case CALL_EXPR:
18036 {
18037 const int nargs = call_expr_nargs (loc);
18038 tree callee = get_callee_fndecl (loc);
18039 int i;
18040 dw_die_ref dwarf_proc;
18041
18042 if (callee == NULL_TREE)
18043 goto call_expansion_failed;
18044
18045 /* We handle only functions whose return type is supported; see is_handled_procedure_type. */
18046 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18047 goto call_expansion_failed;
18048
18049 dwarf_proc = function_to_dwarf_procedure (callee);
18050 if (dwarf_proc == NULL)
18051 goto call_expansion_failed;
18052
18053 /* Evaluate arguments right-to-left so that the first argument will
18054 be the top-most one on the stack. */
18055 for (i = nargs - 1; i >= 0; --i)
18056 {
18057 dw_loc_descr_ref loc_descr
18058 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18059 context);
18060
18061 if (loc_descr == NULL)
18062 goto call_expansion_failed;
18063
18064 add_loc_descr (&ret, loc_descr);
18065 }
18066
18067 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18068 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18069 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18070 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18071 add_loc_descr (&ret, ret1);
18072 break;
18073
18074 call_expansion_failed:
18075 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18076 /* There are no opcodes for these operations. */
18077 return 0;
18078 }
18079
18080 case PREINCREMENT_EXPR:
18081 case PREDECREMENT_EXPR:
18082 case POSTINCREMENT_EXPR:
18083 case POSTDECREMENT_EXPR:
18084 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
18085 /* There are no opcodes for these operations. */
18086 return 0;
18087
18088 case ADDR_EXPR:
18089 /* If we already want an address, see if there is an INDIRECT_REF inside,
18090 e.g. for &this->field. */
18091 if (want_address)
18092 {
18093 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18094 (loc, want_address == 2, context);
18095 if (list_ret)
18096 have_address = 1;
18097 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18098 && (ret = cst_pool_loc_descr (loc)))
18099 have_address = 1;
18100 }
18101 /* Otherwise, process the argument and look for the address. */
18102 if (!list_ret && !ret)
18103 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18104 else
18105 {
18106 if (want_address)
18107 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18108 return NULL;
18109 }
18110 break;
18111
18112 case VAR_DECL:
18113 if (DECL_THREAD_LOCAL_P (loc))
18114 {
18115 rtx rtl;
18116 enum dwarf_location_atom tls_op;
18117 enum dtprel_bool dtprel = dtprel_false;
18118
18119 if (targetm.have_tls)
18120 {
18121 /* If this is not defined, we have no way to emit the
18122 data. */
18123 if (!targetm.asm_out.output_dwarf_dtprel)
18124 return 0;
18125
18126 /* The way DW_OP_GNU_push_tls_address is specified, we
18127 can only look up addresses of objects in the current
18128 module. We used DW_OP_addr as first op, but that's
18129 wrong, because DW_OP_addr is relocated by the debug
18130 info consumer, while DW_OP_GNU_push_tls_address
18131 operand shouldn't be. */
18132 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18133 return 0;
18134 dtprel = dtprel_true;
18135 /* We check for DWARF 5 here because gdb did not implement
18136 DW_OP_form_tls_address until after 7.12. */
18137 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18138 : DW_OP_GNU_push_tls_address);
18139 }
18140 else
18141 {
18142 if (!targetm.emutls.debug_form_tls_address
18143 || !(dwarf_version >= 3 || !dwarf_strict))
18144 return 0;
18145 /* We stuffed the control variable into the DECL_VALUE_EXPR
18146 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18147 no longer appear in gimple code. We used the control
18148 variable specifically so that we could pick it up here. */
18149 loc = DECL_VALUE_EXPR (loc);
18150 tls_op = DW_OP_form_tls_address;
18151 }
18152
18153 rtl = rtl_for_decl_location (loc);
18154 if (rtl == NULL_RTX)
18155 return 0;
18156
18157 if (!MEM_P (rtl))
18158 return 0;
18159 rtl = XEXP (rtl, 0);
18160 if (! CONSTANT_P (rtl))
18161 return 0;
18162
18163 ret = new_addr_loc_descr (rtl, dtprel);
18164 ret1 = new_loc_descr (tls_op, 0, 0);
18165 add_loc_descr (&ret, ret1);
18166
18167 have_address = 1;
18168 break;
18169 }
18170 /* FALLTHRU */
18171
18172 case PARM_DECL:
18173 if (context != NULL && context->dpi != NULL
18174 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18175 {
18176 /* We are generating code for a DWARF procedure and we want to access
18177 one of its arguments: find the appropriate argument offset and let
18178 the resolve_args_picking pass compute the offset that complies
18179 with the stack frame size. */
18180 unsigned i = 0;
18181 tree cursor;
18182
18183 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18184 cursor != NULL_TREE && cursor != loc;
18185 cursor = TREE_CHAIN (cursor), ++i)
18186 ;
18187 /* If we are translating a DWARF procedure, all referenced parameters
18188 must belong to the current function. */
18189 gcc_assert (cursor != NULL_TREE);
18190
18191 ret = new_loc_descr (DW_OP_pick, i, 0);
18192 ret->frame_offset_rel = 1;
18193 break;
18194 }
18195 /* FALLTHRU */
18196
18197 case RESULT_DECL:
18198 if (DECL_HAS_VALUE_EXPR_P (loc))
18199 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18200 want_address, context);
18201 /* FALLTHRU */
18202
18203 case FUNCTION_DECL:
18204 {
18205 rtx rtl;
18206 var_loc_list *loc_list = lookup_decl_loc (loc);
18207
18208 if (loc_list && loc_list->first)
18209 {
18210 list_ret = dw_loc_list (loc_list, loc, want_address);
18211 have_address = want_address != 0;
18212 break;
18213 }
18214 rtl = rtl_for_decl_location (loc);
18215 if (rtl == NULL_RTX)
18216 {
18217 if (TREE_CODE (loc) != FUNCTION_DECL
18218 && early_dwarf
18219 && current_function_decl
18220 && want_address != 1
18221 && ! DECL_IGNORED_P (loc)
18222 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18223 || POINTER_TYPE_P (TREE_TYPE (loc)))
18224 && DECL_CONTEXT (loc) == current_function_decl
18225 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18226 <= DWARF2_ADDR_SIZE))
18227 {
18228 dw_die_ref ref = lookup_decl_die (loc);
18229 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18230 if (ref)
18231 {
18232 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18233 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18234 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18235 }
18236 else
18237 {
18238 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18239 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18240 }
18241 break;
18242 }
18243 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18244 return 0;
18245 }
18246 else if (CONST_INT_P (rtl))
18247 {
18248 HOST_WIDE_INT val = INTVAL (rtl);
18249 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18250 val &= GET_MODE_MASK (DECL_MODE (loc));
18251 ret = int_loc_descriptor (val);
18252 }
18253 else if (GET_CODE (rtl) == CONST_STRING)
18254 {
18255 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18256 return 0;
18257 }
18258 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18259 ret = new_addr_loc_descr (rtl, dtprel_false);
18260 else
18261 {
18262 machine_mode mode, mem_mode;
18263
18264 /* Certain constructs can only be represented at top-level. */
18265 if (want_address == 2)
18266 {
18267 ret = loc_descriptor (rtl, VOIDmode,
18268 VAR_INIT_STATUS_INITIALIZED);
18269 have_address = 1;
18270 }
18271 else
18272 {
18273 mode = GET_MODE (rtl);
18274 mem_mode = VOIDmode;
18275 if (MEM_P (rtl))
18276 {
18277 mem_mode = mode;
18278 mode = get_address_mode (rtl);
18279 rtl = XEXP (rtl, 0);
18280 have_address = 1;
18281 }
18282 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18283 VAR_INIT_STATUS_INITIALIZED);
18284 }
18285 if (!ret)
18286 expansion_failed (loc, rtl,
18287 "failed to produce loc descriptor for rtl");
18288 }
18289 }
18290 break;
18291
18292 case MEM_REF:
18293 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18294 {
18295 have_address = 1;
18296 goto do_plus;
18297 }
18298 /* Fallthru. */
18299 case INDIRECT_REF:
18300 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18301 have_address = 1;
18302 break;
18303
18304 case TARGET_MEM_REF:
18305 case SSA_NAME:
18306 case DEBUG_EXPR_DECL:
18307 return NULL;
18308
18309 case COMPOUND_EXPR:
18310 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18311 context);
18312
18313 CASE_CONVERT:
18314 case VIEW_CONVERT_EXPR:
18315 case SAVE_EXPR:
18316 case MODIFY_EXPR:
18317 case NON_LVALUE_EXPR:
18318 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18319 context);
18320
18321 case COMPONENT_REF:
18322 case BIT_FIELD_REF:
18323 case ARRAY_REF:
18324 case ARRAY_RANGE_REF:
18325 case REALPART_EXPR:
18326 case IMAGPART_EXPR:
18327 {
18328 tree obj, offset;
18329 poly_int64 bitsize, bitpos, bytepos;
18330 machine_mode mode;
18331 int unsignedp, reversep, volatilep = 0;
18332
18333 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18334 &unsignedp, &reversep, &volatilep);
18335
18336 gcc_assert (obj != loc);
18337
18338 list_ret = loc_list_from_tree_1 (obj,
18339 want_address == 2
18340 && known_eq (bitpos, 0)
18341 && !offset ? 2 : 1,
18342 context);
18343 /* TODO: We can extract the value of the small expression via shifting even
18344 for nonzero bitpos. */
18345 if (list_ret == 0)
18346 return 0;
18347 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18348 || !multiple_p (bitsize, BITS_PER_UNIT))
18349 {
18350 expansion_failed (loc, NULL_RTX,
18351 "bitfield access");
18352 return 0;
18353 }
18354
18355 if (offset != NULL_TREE)
18356 {
18357 /* Variable offset. */
18358 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18359 if (list_ret1 == 0)
18360 return 0;
18361 add_loc_list (&list_ret, list_ret1);
18362 if (!list_ret)
18363 return 0;
18364 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18365 }
18366
18367 HOST_WIDE_INT value;
18368 if (bytepos.is_constant (&value) && value > 0)
18369 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18370 value, 0));
18371 else if (maybe_ne (bytepos, 0))
18372 loc_list_plus_const (list_ret, bytepos);
18373
18374 have_address = 1;
18375 break;
18376 }
18377
18378 case INTEGER_CST:
18379 if ((want_address || !tree_fits_shwi_p (loc))
18380 && (ret = cst_pool_loc_descr (loc)))
18381 have_address = 1;
18382 else if (want_address == 2
18383 && tree_fits_shwi_p (loc)
18384 && (ret = address_of_int_loc_descriptor
18385 (int_size_in_bytes (TREE_TYPE (loc)),
18386 tree_to_shwi (loc))))
18387 have_address = 1;
18388 else if (tree_fits_shwi_p (loc))
18389 ret = int_loc_descriptor (tree_to_shwi (loc));
18390 else if (tree_fits_uhwi_p (loc))
18391 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18392 else
18393 {
18394 expansion_failed (loc, NULL_RTX,
18395 "Integer operand is not host integer");
18396 return 0;
18397 }
18398 break;
18399
18400 case CONSTRUCTOR:
18401 case REAL_CST:
18402 case STRING_CST:
18403 case COMPLEX_CST:
18404 if ((ret = cst_pool_loc_descr (loc)))
18405 have_address = 1;
18406 else if (TREE_CODE (loc) == CONSTRUCTOR)
18407 {
18408 tree type = TREE_TYPE (loc);
18409 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18410 unsigned HOST_WIDE_INT offset = 0;
18411 unsigned HOST_WIDE_INT cnt;
18412 constructor_elt *ce;
18413
18414 if (TREE_CODE (type) == RECORD_TYPE)
18415 {
18416 /* This is very limited, but it's enough to output
18417 pointers to member functions, as long as the
18418 referenced function is defined in the current
18419 translation unit. */
18420 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18421 {
18422 tree val = ce->value;
18423
18424 tree field = ce->index;
18425
18426 if (val)
18427 STRIP_NOPS (val);
18428
18429 if (!field || DECL_BIT_FIELD (field))
18430 {
18431 expansion_failed (loc, NULL_RTX,
18432 "bitfield in record type constructor");
18433 size = offset = (unsigned HOST_WIDE_INT)-1;
18434 ret = NULL;
18435 break;
18436 }
18437
18438 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18439 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18440 gcc_assert (pos + fieldsize <= size);
18441 if (pos < offset)
18442 {
18443 expansion_failed (loc, NULL_RTX,
18444 "out-of-order fields in record constructor");
18445 size = offset = (unsigned HOST_WIDE_INT)-1;
18446 ret = NULL;
18447 break;
18448 }
18449 if (pos > offset)
18450 {
18451 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18452 add_loc_descr (&ret, ret1);
18453 offset = pos;
18454 }
18455 if (val && fieldsize != 0)
18456 {
18457 ret1 = loc_descriptor_from_tree (val, want_address, context);
18458 if (!ret1)
18459 {
18460 expansion_failed (loc, NULL_RTX,
18461 "unsupported expression in field");
18462 size = offset = (unsigned HOST_WIDE_INT)-1;
18463 ret = NULL;
18464 break;
18465 }
18466 add_loc_descr (&ret, ret1);
18467 }
18468 if (fieldsize)
18469 {
18470 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18471 add_loc_descr (&ret, ret1);
18472 offset = pos + fieldsize;
18473 }
18474 }
18475
18476 if (offset != size)
18477 {
18478 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18479 add_loc_descr (&ret, ret1);
18480 offset = size;
18481 }
18482
18483 have_address = !!want_address;
18484 }
18485 else
18486 expansion_failed (loc, NULL_RTX,
18487 "constructor of non-record type");
18488 }
18489 else
18490 /* We can construct small constants here using int_loc_descriptor. */
18491 expansion_failed (loc, NULL_RTX,
18492 "constructor or constant not in constant pool");
18493 break;
18494
18495 case TRUTH_AND_EXPR:
18496 case TRUTH_ANDIF_EXPR:
18497 case BIT_AND_EXPR:
18498 op = DW_OP_and;
18499 goto do_binop;
18500
18501 case TRUTH_XOR_EXPR:
18502 case BIT_XOR_EXPR:
18503 op = DW_OP_xor;
18504 goto do_binop;
18505
18506 case TRUTH_OR_EXPR:
18507 case TRUTH_ORIF_EXPR:
18508 case BIT_IOR_EXPR:
18509 op = DW_OP_or;
18510 goto do_binop;
18511
18512 case FLOOR_DIV_EXPR:
18513 case CEIL_DIV_EXPR:
18514 case ROUND_DIV_EXPR:
18515 case TRUNC_DIV_EXPR:
18516 case EXACT_DIV_EXPR:
18517 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18518 return 0;
18519 op = DW_OP_div;
18520 goto do_binop;
18521
18522 case MINUS_EXPR:
18523 op = DW_OP_minus;
18524 goto do_binop;
18525
18526 case FLOOR_MOD_EXPR:
18527 case CEIL_MOD_EXPR:
18528 case ROUND_MOD_EXPR:
18529 case TRUNC_MOD_EXPR:
18530 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18531 {
18532 op = DW_OP_mod;
18533 goto do_binop;
18534 }
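/* Signed case: expand x % y as x - (x / y) * y. With "x y" on the stack
   (bottom to top), the sequence built below evolves as:
   over; over -> x y x y; div -> x y (x/y); mul -> x ((x/y)*y);
   minus -> x - (x/y)*y.  */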
18535 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18536 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18537 if (list_ret == 0 || list_ret1 == 0)
18538 return 0;
18539
18540 add_loc_list (&list_ret, list_ret1);
18541 if (list_ret == 0)
18542 return 0;
18543 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18544 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18545 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18546 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18547 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18548 break;
18549
18550 case MULT_EXPR:
18551 op = DW_OP_mul;
18552 goto do_binop;
18553
18554 case LSHIFT_EXPR:
18555 op = DW_OP_shl;
18556 goto do_binop;
18557
18558 case RSHIFT_EXPR:
18559 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18560 goto do_binop;
18561
18562 case POINTER_PLUS_EXPR:
18563 case PLUS_EXPR:
18564 do_plus:
18565 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18566 {
18567 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18568 smarter to encode their opposite. The DW_OP_plus_uconst operation
18569 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18570 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18571 bytes, Y being the size of the operation that pushes the opposite
18572 of the addend. So let's choose the smallest representation. */
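/* For instance, for an addend of -1, DW_OP_plus_uconst would need the
   addend reinterpreted as an unsigned ULEB128 value (many bytes), whereas
   pushing the opposite and subtracting is just "DW_OP_lit1; DW_OP_minus"
   (two bytes).  */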
18573 const tree tree_addend = TREE_OPERAND (loc, 1);
18574 offset_int wi_addend;
18575 HOST_WIDE_INT shwi_addend;
18576 dw_loc_descr_ref loc_naddend;
18577
18578 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18579 if (list_ret == 0)
18580 return 0;
18581
18582 /* Try to get the literal to push. It is the opposite of the addend,
18583 so as we rely on wrapping during DWARF evaluation, first decode
18584 the literal as a "DWARF-sized" signed number. */
18585 wi_addend = wi::to_offset (tree_addend);
18586 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18587 shwi_addend = wi_addend.to_shwi ();
18588 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18589 ? int_loc_descriptor (-shwi_addend)
18590 : NULL;
18591
18592 if (loc_naddend != NULL
18593 && ((unsigned) size_of_uleb128 (shwi_addend)
18594 > size_of_loc_descr (loc_naddend)))
18595 {
18596 add_loc_descr_to_each (list_ret, loc_naddend);
18597 add_loc_descr_to_each (list_ret,
18598 new_loc_descr (DW_OP_minus, 0, 0));
18599 }
18600 else
18601 {
18602 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18603 {
18604 loc_naddend = loc_cur;
18605 loc_cur = loc_cur->dw_loc_next;
18606 ggc_free (loc_naddend);
18607 }
18608 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18609 }
18610 break;
18611 }
18612
18613 op = DW_OP_plus;
18614 goto do_binop;
18615
18616 case LE_EXPR:
18617 op = DW_OP_le;
18618 goto do_comp_binop;
18619
18620 case GE_EXPR:
18621 op = DW_OP_ge;
18622 goto do_comp_binop;
18623
18624 case LT_EXPR:
18625 op = DW_OP_lt;
18626 goto do_comp_binop;
18627
18628 case GT_EXPR:
18629 op = DW_OP_gt;
18630 goto do_comp_binop;
18631
18632 do_comp_binop:
18633 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18634 {
18635 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18636 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18637 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18638 TREE_CODE (loc));
18639 break;
18640 }
18641 else
18642 goto do_binop;
18643
18644 case EQ_EXPR:
18645 op = DW_OP_eq;
18646 goto do_binop;
18647
18648 case NE_EXPR:
18649 op = DW_OP_ne;
18650 goto do_binop;
18651
18652 do_binop:
18653 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18654 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18655 if (list_ret == 0 || list_ret1 == 0)
18656 return 0;
18657
18658 add_loc_list (&list_ret, list_ret1);
18659 if (list_ret == 0)
18660 return 0;
18661 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18662 break;
18663
18664 case TRUTH_NOT_EXPR:
18665 case BIT_NOT_EXPR:
18666 op = DW_OP_not;
18667 goto do_unop;
18668
18669 case ABS_EXPR:
18670 op = DW_OP_abs;
18671 goto do_unop;
18672
18673 case NEGATE_EXPR:
18674 op = DW_OP_neg;
18675 goto do_unop;
18676
18677 do_unop:
18678 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18679 if (list_ret == 0)
18680 return 0;
18681
18682 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18683 break;
18684
18685 case MIN_EXPR:
18686 case MAX_EXPR:
18687 {
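/* Rewrite MIN (a, b) as the equivalent "a > b ? b : a" (and MAX (a, b) as
   "a < b ? b : a") and let the COND_EXPR handling below emit the
   corresponding conditional expression.  */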
18688 const enum tree_code code =
18689 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18690
18691 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18692 build2 (code, integer_type_node,
18693 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18694 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18695 }
18696
18697 /* fall through */
18698
18699 case COND_EXPR:
18700 {
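/* The layout built below is:
     <condition>; DW_OP_bra -> then; <else value>; DW_OP_skip -> end;
     then: <then value>; end: DW_OP_nop
   where DW_OP_bra branches when the condition evaluates to nonzero.  */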
18701 dw_loc_descr_ref lhs
18702 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18703 dw_loc_list_ref rhs
18704 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18705 dw_loc_descr_ref bra_node, jump_node, tmp;
18706
18707 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18708 if (list_ret == 0 || lhs == 0 || rhs == 0)
18709 return 0;
18710
18711 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18712 add_loc_descr_to_each (list_ret, bra_node);
18713
18714 add_loc_list (&list_ret, rhs);
18715 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18716 add_loc_descr_to_each (list_ret, jump_node);
18717
18718 add_loc_descr_to_each (list_ret, lhs);
18719 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18720 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18721
18722 /* ??? Need a node to point the skip at. Use a nop. */
18723 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18724 add_loc_descr_to_each (list_ret, tmp);
18725 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18726 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18727 }
18728 break;
18729
18730 case FIX_TRUNC_EXPR:
18731 return 0;
18732
18733 default:
18734 /* Leave front-end specific codes as simply unknown. This comes
18735 up, for instance, with the C STMT_EXPR. */
18736 if ((unsigned int) TREE_CODE (loc)
18737 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18738 {
18739 expansion_failed (loc, NULL_RTX,
18740 "language specific tree node");
18741 return 0;
18742 }
18743
18744 /* Otherwise this is a generic code; we should just list all of
18745 these explicitly. Reaching here means we forgot one. */
18746 if (flag_checking)
18747 gcc_unreachable ();
18748
18749 /* In a release build, we want to degrade gracefully: better to
18750 generate incomplete debugging information than to crash. */
18751 return NULL;
18752 }
18753
18754 if (!ret && !list_ret)
18755 return 0;
18756
18757 if (want_address == 2 && !have_address
18758 && (dwarf_version >= 4 || !dwarf_strict))
18759 {
18760 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18761 {
18762 expansion_failed (loc, NULL_RTX,
18763 "DWARF address size mismatch");
18764 return 0;
18765 }
18766 if (ret)
18767 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18768 else
18769 add_loc_descr_to_each (list_ret,
18770 new_loc_descr (DW_OP_stack_value, 0, 0));
18771 have_address = 1;
18772 }
18773 /* Report failure if we can't fulfill the request for an address. */
18774 if (want_address && !have_address)
18775 {
18776 expansion_failed (loc, NULL_RTX,
18777 "Want address and only have value");
18778 return 0;
18779 }
18780
18781 gcc_assert (!ret || !list_ret);
18782
18783 /* If we've got an address and don't want one, dereference. */
18784 if (!want_address && have_address)
18785 {
18786 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18787
18788 if (size > DWARF2_ADDR_SIZE || size == -1)
18789 {
18790 expansion_failed (loc, NULL_RTX,
18791 "DWARF address size mismatch");
18792 return 0;
18793 }
18794 else if (size == DWARF2_ADDR_SIZE)
18795 op = DW_OP_deref;
18796 else
18797 op = DW_OP_deref_size;
18798
18799 if (ret)
18800 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18801 else
18802 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18803 }
18804 if (ret)
18805 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
18806
18807 return list_ret;
18808 }
18809
18810 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18811 expressions. */
18812
18813 static dw_loc_list_ref
18814 loc_list_from_tree (tree loc, int want_address,
18815 struct loc_descr_context *context)
18816 {
18817 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18818
18819 for (dw_loc_list_ref loc_cur = result;
18820 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18821 loc_descr_without_nops (loc_cur->expr);
18822 return result;
18823 }
18824
18825 /* Same as above but return only single location expression. */
18826 static dw_loc_descr_ref
18827 loc_descriptor_from_tree (tree loc, int want_address,
18828 struct loc_descr_context *context)
18829 {
18830 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18831 if (!ret)
18832 return NULL;
18833 if (ret->dw_loc_next)
18834 {
18835 expansion_failed (loc, NULL_RTX,
18836 "Location list where only loc descriptor needed");
18837 return NULL;
18838 }
18839 return ret->expr;
18840 }
18841
18842 /* Given a value, round it up to the lowest multiple of `boundary'
18843 which is not less than the value itself. */
18844
18845 static inline HOST_WIDE_INT
18846 ceiling (HOST_WIDE_INT value, unsigned int boundary)
18847 {
18848 return (((value + boundary - 1) / boundary) * boundary);
18849 }
18850
18851 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
18852 pointer to the declared type for the relevant field variable, or return
18853 `integer_type_node' if the given node turns out to be an
18854 ERROR_MARK node. */
18855
18856 static inline tree
18857 field_type (const_tree decl)
18858 {
18859 tree type;
18860
18861 if (TREE_CODE (decl) == ERROR_MARK)
18862 return integer_type_node;
18863
18864 type = DECL_BIT_FIELD_TYPE (decl);
18865 if (type == NULL_TREE)
18866 type = TREE_TYPE (decl);
18867
18868 return type;
18869 }
18870
18871 /* Given a pointer to a tree node, return the alignment in bits for
18872 it, or else return BITS_PER_WORD if the node actually turns out to
18873 be an ERROR_MARK node. */
18874
18875 static inline unsigned
18876 simple_type_align_in_bits (const_tree type)
18877 {
18878 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
18879 }
18880
18881 static inline unsigned
18882 simple_decl_align_in_bits (const_tree decl)
18883 {
18884 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
18885 }
18886
18887 /* Return the result of rounding T up to ALIGN. */
18888
18889 static inline offset_int
18890 round_up_to_align (const offset_int &t, unsigned int align)
18891 {
18892 return wi::udiv_trunc (t + align - 1, align) * align;
18893 }
18894
18895 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
18896 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
18897 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
18898 if we fail to return the size in one of these two forms. */
18899
18900 static dw_loc_descr_ref
18901 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
18902 {
18903 tree tree_size;
18904 struct loc_descr_context ctx;
18905
18906 /* Prefer to return a constant integer, if possible. */
18907 *cst_size = int_size_in_bytes (type);
18908 if (*cst_size != -1)
18909 return NULL;
18910
18911 ctx.context_type = const_cast<tree> (type);
18912 ctx.base_decl = NULL_TREE;
18913 ctx.dpi = NULL;
18914 ctx.placeholder_arg = false;
18915 ctx.placeholder_seen = false;
18916
18917 type = TYPE_MAIN_VARIANT (type);
18918 tree_size = TYPE_SIZE_UNIT (type);
18919 return ((tree_size != NULL_TREE)
18920 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
18921 : NULL);
18922 }
18923
18924 /* Helper structure for RECORD_TYPE processing. */
18925 struct vlr_context
18926 {
18927 /* Root RECORD_TYPE. It is needed to generate data member location
18928 descriptions in variable-length records (VLR), but also to cope with
18929 variants, which are composed of nested structures multiplexed with
18930 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
18931 function processing a FIELD_DECL, it is required to be non-null. */
18932 tree struct_type;
18933 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
18934 QUAL_UNION_TYPE), this holds an expression that computes the offset for
18935 this variant part as part of the root record (in storage units). For
18936 regular records, it must be NULL_TREE. */
18937 tree variant_part_offset;
18938 };
18939
18940 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
18941 addressed byte of the "containing object" for the given FIELD_DECL. If
18942 possible, return a native constant through CST_OFFSET (in which case NULL is
18943 returned); otherwise return a DWARF expression that computes the offset.
18944
18945 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
18946 that offset is, either because the argument turns out to be a pointer to an
18947 ERROR_MARK node, or because the offset expression is too complex for us.
18948
18949 CTX is required: see the comment for VLR_CONTEXT. */
18950
18951 static dw_loc_descr_ref
18952 field_byte_offset (const_tree decl, struct vlr_context *ctx,
18953 HOST_WIDE_INT *cst_offset)
18954 {
18955 tree tree_result;
18956 dw_loc_list_ref loc_result;
18957
18958 *cst_offset = 0;
18959
18960 if (TREE_CODE (decl) == ERROR_MARK)
18961 return NULL;
18962 else
18963 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
18964
18965 /* We cannot handle variable bit offsets at the moment, so abort if that is
18966 the case. */
18967 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
18968 return NULL;
18969
18970 #ifdef PCC_BITFIELD_TYPE_MATTERS
18971 /* We used to handle only constant offsets in all cases. Now, we properly
18972 handle dynamic byte offsets only when PCC bitfield type layout doesn't
18973 matter. */
18974 if (PCC_BITFIELD_TYPE_MATTERS
18975 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
18976 {
18977 offset_int object_offset_in_bits;
18978 offset_int object_offset_in_bytes;
18979 offset_int bitpos_int;
18980 tree type;
18981 tree field_size_tree;
18982 offset_int deepest_bitpos;
18983 offset_int field_size_in_bits;
18984 unsigned int type_align_in_bits;
18985 unsigned int decl_align_in_bits;
18986 offset_int type_size_in_bits;
18987
18988 bitpos_int = wi::to_offset (bit_position (decl));
18989 type = field_type (decl);
18990 type_size_in_bits = offset_int_type_size_in_bits (type);
18991 type_align_in_bits = simple_type_align_in_bits (type);
18992
18993 field_size_tree = DECL_SIZE (decl);
18994
18995 /* The size could be unspecified if there was an error, or for
18996 a flexible array member. */
18997 if (!field_size_tree)
18998 field_size_tree = bitsize_zero_node;
18999
19000 /* If the size of the field is not constant, use the type size. */
19001 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19002 field_size_in_bits = wi::to_offset (field_size_tree);
19003 else
19004 field_size_in_bits = type_size_in_bits;
19005
19006 decl_align_in_bits = simple_decl_align_in_bits (decl);
19007
19008 /* The GCC front-end doesn't make any attempt to keep track of the
19009 starting bit offset (relative to the start of the containing
19010 structure type) of the hypothetical "containing object" for a
19011 bit-field. Thus, when computing the byte offset value for the
19012 start of the "containing object" of a bit-field, we must deduce
19013 this information on our own. This can be rather tricky to do in
19014 some cases. For example, handling the following structure type
19015 definition when compiling for an i386/i486 target (which only
19016 aligns long long's to 32-bit boundaries) can be very tricky:
19017
19018 struct S { int field1; long long field2:31; };
19019
19020 Fortunately, there is a simple rule-of-thumb which can be used
19021 in such cases. When compiling for an i386/i486, GCC will
19022 allocate 8 bytes for the structure shown above. It decides to
19023 do this based upon one simple rule for bit-field allocation.
19024 GCC allocates each "containing object" for each bit-field at
19025 the first (i.e. lowest addressed) legitimate alignment boundary
19026 (based upon the required minimum alignment for the declared
19027 type of the field) which it can possibly use, subject to the
19028 condition that there is still enough available space remaining
19029 in the containing object (when allocated at the selected point)
19030 to fully accommodate all of the bits of the bit-field itself.
19031
19032 This simple rule makes it obvious why GCC allocates 8 bytes for
19033 each object of the structure type shown above. When looking
19034 for a place to allocate the "containing object" for `field2',
19035 the compiler simply tries to allocate a 64-bit "containing
19036 object" at each successive 32-bit boundary (starting at zero)
19037 until it finds a place to allocate that 64-bit field such that
19038 at least 31 contiguous (and previously unallocated) bits remain
19039 within that selected 64 bit field. (As it turns out, for the
19040 example above, the compiler finds it is OK to allocate the
19041 "containing object" 64-bit field at bit-offset zero within the
19042 structure type.)
19043
19044 Here we attempt to work backwards from the limited set of facts
19045 we're given, and we try to deduce from those facts where GCC
19046 must have believed that the containing object started (within
19047 the structure type). The value we deduce is then used (by the
19048 callers of this routine) to generate DW_AT_location and
19049 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19050 the case of DW_AT_location, regular fields as well). */
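/* As a worked instance of the rule above for the struct S example:
   field2 has bitpos_int == 32, field_size_in_bits == 31,
   type_size_in_bits == 64 and type_align_in_bits == 32, so
   deepest_bitpos == 63 and object_offset_in_bits == 63 - 64 == -1, which
   rounds up to 0: the hypothetical containing 64-bit object starts at bit
   offset zero, as described above.  */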
19051
19052 /* Figure out the bit-distance from the start of the structure to
19053 the "deepest" bit of the bit-field. */
19054 deepest_bitpos = bitpos_int + field_size_in_bits;
19055
19056 /* This is the tricky part. Use some fancy footwork to deduce
19057 where the lowest addressed bit of the containing object must
19058 be. */
19059 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19060
19061 /* Round up to type_align by default. This works best for
19062 bitfields. */
19063 object_offset_in_bits
19064 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19065
19066 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19067 {
19068 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19069
19070 /* Round up to decl_align instead. */
19071 object_offset_in_bits
19072 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19073 }
19074
19075 object_offset_in_bytes
19076 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19077 if (ctx->variant_part_offset == NULL_TREE)
19078 {
19079 *cst_offset = object_offset_in_bytes.to_shwi ();
19080 return NULL;
19081 }
19082 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19083 }
19084 else
19085 #endif /* PCC_BITFIELD_TYPE_MATTERS */
19086 tree_result = byte_position (decl);
19087
19088 if (ctx->variant_part_offset != NULL_TREE)
19089 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19090 ctx->variant_part_offset, tree_result);
19091
19092 /* If the byte offset is a constant, it's simpler to handle a native
19093 constant rather than a DWARF expression. */
19094 if (TREE_CODE (tree_result) == INTEGER_CST)
19095 {
19096 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19097 return NULL;
19098 }
19099 struct loc_descr_context loc_ctx = {
19100 ctx->struct_type, /* context_type */
19101 NULL_TREE, /* base_decl */
19102 NULL, /* dpi */
19103 false, /* placeholder_arg */
19104 false /* placeholder_seen */
19105 };
19106 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19107
19108 /* We want a DWARF expression: abort if we only have a location list with
19109 multiple elements. */
19110 if (!loc_result || !single_element_loc_list_p (loc_result))
19111 return NULL;
19112 else
19113 return loc_result->expr;
19114 }
19115 \f
19116 /* The following routines define various Dwarf attributes and any data
19117 associated with them. */
19118
19119 /* Add a location description attribute value to a DIE.
19120
19121 This emits location attributes suitable for whole variables and
19122 whole parameters. Note that the location attributes for struct fields are
19123 generated by the routine `data_member_location_attribute' below. */
19124
19125 static inline void
19126 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19127 dw_loc_list_ref descr)
19128 {
19129 bool check_no_locviews = true;
19130 if (descr == 0)
19131 return;
19132 if (single_element_loc_list_p (descr))
19133 add_AT_loc (die, attr_kind, descr->expr);
19134 else
19135 {
19136 add_AT_loc_list (die, attr_kind, descr);
19137 gcc_assert (descr->ll_symbol);
19138 if (attr_kind == DW_AT_location && descr->vl_symbol
19139 && dwarf2out_locviews_in_attribute ())
19140 {
19141 add_AT_view_list (die, DW_AT_GNU_locviews);
19142 check_no_locviews = false;
19143 }
19144 }
19145
19146 if (check_no_locviews)
19147 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19148 }
19149
19150 /* Add DW_AT_accessibility attribute to DIE if needed. */
19151
19152 static void
19153 add_accessibility_attribute (dw_die_ref die, tree decl)
19154 {
19155 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19156 children, otherwise the default is DW_ACCESS_public. In DWARF2
19157 the default has always been DW_ACCESS_public. */
19158 if (TREE_PROTECTED (decl))
19159 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19160 else if (TREE_PRIVATE (decl))
19161 {
19162 if (dwarf_version == 2
19163 || die->die_parent == NULL
19164 || die->die_parent->die_tag != DW_TAG_class_type)
19165 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19166 }
19167 else if (dwarf_version > 2
19168 && die->die_parent
19169 && die->die_parent->die_tag == DW_TAG_class_type)
19170 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19171 }
19172
19173 /* Attach the specialized form of location attribute used for data members of
19174 struct and union types. In the special case of a FIELD_DECL node which
19175 represents a bit-field, the "offset" part of this special location
19176 descriptor must indicate the distance in bytes from the lowest-addressed
19177 byte of the containing struct or union type to the lowest-addressed byte of
19178 the "containing object" for the bit-field. (See the `field_byte_offset'
19179 function above).
19180
19181 For any given bit-field, the "containing object" is a hypothetical object
19182 (of some integral or enum type) within which the given bit-field lives. The
19183 type of this hypothetical "containing object" is always the same as the
19184 declared type of the individual bit-field itself (for GCC anyway... the
19185 DWARF spec doesn't actually mandate this). Note that it is the size (in
19186 bytes) of the hypothetical "containing object" which will be given in the
19187 DW_AT_byte_size attribute for this bit-field. (See the
19188 `byte_size_attribute' function below.) It is also used when calculating the
19189 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19190 function below.)
19191
19192 CTX is required: see the comment for VLR_CONTEXT. */
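/* As an illustration only (hypothetical example, not taken from this file):
   given

	struct s { char c; int f : 3; };

   the "containing object" for F is an object of the declared type `int', so
   on a target where `int' is 4 bytes F gets DW_AT_byte_size 4, and the data
   member location computed below gives the byte offset of that int-sized
   object within `struct s'.  */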
19193
19194 static void
19195 add_data_member_location_attribute (dw_die_ref die,
19196 tree decl,
19197 struct vlr_context *ctx)
19198 {
19199 HOST_WIDE_INT offset;
19200 dw_loc_descr_ref loc_descr = 0;
19201
19202 if (TREE_CODE (decl) == TREE_BINFO)
19203 {
19204 /* We're working on the TAG_inheritance for a base class. */
19205 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19206 {
19207 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19208 aren't at a fixed offset from all (sub)objects of the same
19209 type. We need to extract the appropriate offset from our
19210 vtable. The following dwarf expression means
19211
19212 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19213
19214 This is specific to the V3 ABI, of course. */
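	  /* Sketch of the location expression built just below (the exact
	     opcode used for the constant depends on int_loc_descriptor):

		DW_OP_dup, DW_OP_deref, DW_OP_constu <-Offset>,
		DW_OP_minus, DW_OP_deref, DW_OP_plus  */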
19215
19216 dw_loc_descr_ref tmp;
19217
19218 /* Make a copy of the object address. */
19219 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19220 add_loc_descr (&loc_descr, tmp);
19221
19222 /* Extract the vtable address. */
19223 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19224 add_loc_descr (&loc_descr, tmp);
19225
19226 /* Calculate the address of the offset. */
19227 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19228 gcc_assert (offset < 0);
19229
19230 tmp = int_loc_descriptor (-offset);
19231 add_loc_descr (&loc_descr, tmp);
19232 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19233 add_loc_descr (&loc_descr, tmp);
19234
19235 /* Extract the offset. */
19236 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19237 add_loc_descr (&loc_descr, tmp);
19238
19239 /* Add it to the object address. */
19240 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19241 add_loc_descr (&loc_descr, tmp);
19242 }
19243 else
19244 offset = tree_to_shwi (BINFO_OFFSET (decl));
19245 }
19246 else
19247 {
19248 loc_descr = field_byte_offset (decl, ctx, &offset);
19249
19250 /* If loc_descr is available then we know the field offset is dynamic.
19251 However, GDB does not handle dynamic field offsets very well at the
19252 moment. */
19253 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19254 {
19255 loc_descr = NULL;
19256 offset = 0;
19257 }
19258
19259       /* Data member location evaluation starts with the base address on the
19260 stack. Compute the field offset and add it to this base address. */
19261 else if (loc_descr != NULL)
19262 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19263 }
19264
19265 if (! loc_descr)
19266 {
19267       /* While DW_AT_data_bit_offset was already added in DWARF4,
19268	  GDB, for example, only added support for it in November 2016.  For DWARF5
19269	  we need newer debug info consumers anyway.  We might change this
19270	  to dwarf_version >= 4 once most consumers have caught up.  */
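      /* As an illustration only (hypothetical example): for

	     struct s { unsigned a : 5; unsigned b : 3; };

	 on a typical little-endian target B would get DW_AT_data_bit_offset 5,
	 replacing the DW_AT_byte_size / DW_AT_bit_offset pair removed below.  */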
19271 if (dwarf_version >= 5
19272 && TREE_CODE (decl) == FIELD_DECL
19273 && DECL_BIT_FIELD_TYPE (decl))
19274 {
19275 tree off = bit_position (decl);
19276 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19277 {
19278 remove_AT (die, DW_AT_byte_size);
19279 remove_AT (die, DW_AT_bit_offset);
19280 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19281 return;
19282 }
19283 }
19284 if (dwarf_version > 2)
19285 {
19286 /* Don't need to output a location expression, just the constant. */
19287 if (offset < 0)
19288 add_AT_int (die, DW_AT_data_member_location, offset);
19289 else
19290 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19291 return;
19292 }
19293 else
19294 {
19295 enum dwarf_location_atom op;
19296
19297 /* The DWARF2 standard says that we should assume that the structure
19298 address is already on the stack, so we can specify a structure
19299 field address by using DW_OP_plus_uconst. */
19300 op = DW_OP_plus_uconst;
19301 loc_descr = new_loc_descr (op, offset, 0);
19302 }
19303 }
19304
19305 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19306 }
19307
19308 /* Writes integer values to dw_vec_const array. */
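/* Usage illustration: insert_int (0x1234, 2, dest) stores the bytes 0x34
   then 0x12, since the loop below always emits the least significant byte
   first.  */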
19309
19310 static void
19311 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19312 {
19313 while (size != 0)
19314 {
19315 *dest++ = val & 0xff;
19316 val >>= 8;
19317 --size;
19318 }
19319 }
19320
19321 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19322
19323 static HOST_WIDE_INT
19324 extract_int (const unsigned char *src, unsigned int size)
19325 {
19326 HOST_WIDE_INT val = 0;
19327
19328 src += size;
19329 while (size != 0)
19330 {
19331 val <<= 8;
19332 val |= *--src & 0xff;
19333 --size;
19334 }
19335 return val;
19336 }
19337
19338 /* Writes wide_int values to dw_vec_const array. */
19339
19340 static void
19341 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19342 {
19343 int i;
19344
19345 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19346 {
19347 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19348 return;
19349 }
19350
19351 /* We'd have to extend this code to support odd sizes. */
19352 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19353
19354 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19355
19356 if (WORDS_BIG_ENDIAN)
19357 for (i = n - 1; i >= 0; i--)
19358 {
19359 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19360 dest += sizeof (HOST_WIDE_INT);
19361 }
19362 else
19363 for (i = 0; i < n; i++)
19364 {
19365 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19366 dest += sizeof (HOST_WIDE_INT);
19367 }
19368 }
19369
19370 /* Writes floating point values to dw_vec_const array. */
19371
19372 static void
19373 insert_float (const_rtx rtl, unsigned char *array)
19374 {
19375 long val[4];
19376 int i;
19377 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19378
19379 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19380
19381 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19382 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19383 {
19384 insert_int (val[i], 4, array);
19385 array += 4;
19386 }
19387 }
19388
19389 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19390 does not have a "location" either in memory or in a register. These
19391 things can arise in GNU C when a constant is passed as an actual parameter
19392 to an inlined function. They can also arise in C++ where declared
19393 constants do not necessarily get memory "homes". */
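/* Illustration only (hypothetical example): for a call such as f (42) whose
   inlined formal parameter ends up with no location at all, the
   (const_int 42) rtx reaches the CONST_INT case below and the parameter DIE
   gets DW_AT_const_value 42 instead of a DW_AT_location.  */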
19394
19395 static bool
19396 add_const_value_attribute (dw_die_ref die, rtx rtl)
19397 {
19398 switch (GET_CODE (rtl))
19399 {
19400 case CONST_INT:
19401 {
19402 HOST_WIDE_INT val = INTVAL (rtl);
19403
19404 if (val < 0)
19405 add_AT_int (die, DW_AT_const_value, val);
19406 else
19407 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19408 }
19409 return true;
19410
19411 case CONST_WIDE_INT:
19412 {
19413 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19414 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19415 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19416 wide_int w = wi::zext (w1, prec);
19417 add_AT_wide (die, DW_AT_const_value, w);
19418 }
19419 return true;
19420
19421 case CONST_DOUBLE:
19422 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19423 floating-point constant. A CONST_DOUBLE is used whenever the
19424 constant requires more than one word in order to be adequately
19425 represented. */
19426 if (TARGET_SUPPORTS_WIDE_INT == 0
19427 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19428 add_AT_double (die, DW_AT_const_value,
19429 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19430 else
19431 {
19432 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19433 unsigned int length = GET_MODE_SIZE (mode);
19434 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19435
19436 insert_float (rtl, array);
19437 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19438 }
19439 return true;
19440
19441 case CONST_VECTOR:
19442 {
19443 unsigned int length;
19444 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19445 return false;
19446
19447 machine_mode mode = GET_MODE (rtl);
19448 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19449 unsigned char *array
19450 = ggc_vec_alloc<unsigned char> (length * elt_size);
19451 unsigned int i;
19452 unsigned char *p;
19453 machine_mode imode = GET_MODE_INNER (mode);
19454
19455 switch (GET_MODE_CLASS (mode))
19456 {
19457 case MODE_VECTOR_INT:
19458 for (i = 0, p = array; i < length; i++, p += elt_size)
19459 {
19460 rtx elt = CONST_VECTOR_ELT (rtl, i);
19461 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19462 }
19463 break;
19464
19465 case MODE_VECTOR_FLOAT:
19466 for (i = 0, p = array; i < length; i++, p += elt_size)
19467 {
19468 rtx elt = CONST_VECTOR_ELT (rtl, i);
19469 insert_float (elt, p);
19470 }
19471 break;
19472
19473 default:
19474 gcc_unreachable ();
19475 }
19476
19477 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19478 }
19479 return true;
19480
19481 case CONST_STRING:
19482 if (dwarf_version >= 4 || !dwarf_strict)
19483 {
19484 dw_loc_descr_ref loc_result;
19485 resolve_one_addr (&rtl);
19486 rtl_addr:
19487 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19488 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19489 add_AT_loc (die, DW_AT_location, loc_result);
19490 vec_safe_push (used_rtx_array, rtl);
19491 return true;
19492 }
19493 return false;
19494
19495 case CONST:
19496 if (CONSTANT_P (XEXP (rtl, 0)))
19497 return add_const_value_attribute (die, XEXP (rtl, 0));
19498 /* FALLTHROUGH */
19499 case SYMBOL_REF:
19500 if (!const_ok_for_output (rtl))
19501 return false;
19502 /* FALLTHROUGH */
19503 case LABEL_REF:
19504 if (dwarf_version >= 4 || !dwarf_strict)
19505 goto rtl_addr;
19506 return false;
19507
19508 case PLUS:
19509 /* In cases where an inlined instance of an inline function is passed
19510 the address of an `auto' variable (which is local to the caller) we
19511 can get a situation where the DECL_RTL of the artificial local
19512 variable (for the inlining) which acts as a stand-in for the
19513 corresponding formal parameter (of the inline function) will look
19514 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19515 exactly a compile-time constant expression, but it isn't the address
19516 of the (artificial) local variable either. Rather, it represents the
19517 *value* which the artificial local variable always has during its
19518 lifetime. We currently have no way to represent such quasi-constant
19519 values in Dwarf, so for now we just punt and generate nothing. */
19520 return false;
19521
19522 case HIGH:
19523 case CONST_FIXED:
19524 return false;
19525
19526 case MEM:
19527 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19528 && MEM_READONLY_P (rtl)
19529 && GET_MODE (rtl) == BLKmode)
19530 {
19531 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19532 return true;
19533 }
19534 return false;
19535
19536 default:
19537 /* No other kinds of rtx should be possible here. */
19538 gcc_unreachable ();
19539 }
19540 return false;
19541 }
19542
19543 /* Determine whether the evaluation of EXPR references any variables
19544 or functions which aren't otherwise used (and therefore may not be
19545 output). */
19546 static tree
19547 reference_to_unused (tree * tp, int * walk_subtrees,
19548 void * data ATTRIBUTE_UNUSED)
19549 {
19550 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19551 *walk_subtrees = 0;
19552
19553 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19554 && ! TREE_ASM_WRITTEN (*tp))
19555 return *tp;
19556 /* ??? The C++ FE emits debug information for using decls, so
19557 putting gcc_unreachable here falls over. See PR31899. For now
19558 be conservative. */
19559 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19560 return *tp;
19561 else if (VAR_P (*tp))
19562 {
19563 varpool_node *node = varpool_node::get (*tp);
19564 if (!node || !node->definition)
19565 return *tp;
19566 }
19567 else if (TREE_CODE (*tp) == FUNCTION_DECL
19568 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19569 {
19570 /* The call graph machinery must have finished analyzing,
19571 optimizing and gimplifying the CU by now.
19572 So if *TP has no call graph node associated
19573 to it, it means *TP will not be emitted. */
19574 if (!cgraph_node::get (*tp))
19575 return *tp;
19576 }
19577 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19578 return *tp;
19579
19580 return NULL_TREE;
19581 }
19582
19583 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19584 for use in a later add_const_value_attribute call. */
19585
19586 static rtx
19587 rtl_for_decl_init (tree init, tree type)
19588 {
19589 rtx rtl = NULL_RTX;
19590
19591 STRIP_NOPS (init);
19592
19593 /* If a variable is initialized with a string constant without embedded
19594 zeros, build CONST_STRING. */
19595 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19596 {
19597 tree enttype = TREE_TYPE (type);
19598 tree domain = TYPE_DOMAIN (type);
19599 scalar_int_mode mode;
19600
19601 if (is_int_mode (TYPE_MODE (enttype), &mode)
19602 && GET_MODE_SIZE (mode) == 1
19603 && domain
19604 && TYPE_MAX_VALUE (domain)
19605 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19606 && integer_zerop (TYPE_MIN_VALUE (domain))
19607 && compare_tree_int (TYPE_MAX_VALUE (domain),
19608 TREE_STRING_LENGTH (init) - 1) == 0
19609 && ((size_t) TREE_STRING_LENGTH (init)
19610 == strlen (TREE_STRING_POINTER (init)) + 1))
19611 {
19612 rtl = gen_rtx_CONST_STRING (VOIDmode,
19613 ggc_strdup (TREE_STRING_POINTER (init)));
19614 rtl = gen_rtx_MEM (BLKmode, rtl);
19615 MEM_READONLY_P (rtl) = 1;
19616 }
19617 }
19618 /* Other aggregates, and complex values, could be represented using
19619 CONCAT: FIXME! */
19620 else if (AGGREGATE_TYPE_P (type)
19621 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19622 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19623 || TREE_CODE (type) == COMPLEX_TYPE)
19624 ;
19625 /* Vectors only work if their mode is supported by the target.
19626 FIXME: generic vectors ought to work too. */
19627 else if (TREE_CODE (type) == VECTOR_TYPE
19628 && !VECTOR_MODE_P (TYPE_MODE (type)))
19629 ;
19630 /* If the initializer is something that we know will expand into an
19631 immediate RTL constant, expand it now. We must be careful not to
19632 reference variables which won't be output. */
19633 else if (initializer_constant_valid_p (init, type)
19634 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19635 {
19636 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19637 possible. */
19638 if (TREE_CODE (type) == VECTOR_TYPE)
19639 switch (TREE_CODE (init))
19640 {
19641 case VECTOR_CST:
19642 break;
19643 case CONSTRUCTOR:
19644 if (TREE_CONSTANT (init))
19645 {
19646 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19647 bool constant_p = true;
19648 tree value;
19649 unsigned HOST_WIDE_INT ix;
19650
19651 /* Even when ctor is constant, it might contain non-*_CST
19652 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19653 belong into VECTOR_CST nodes. */
19654 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19655 if (!CONSTANT_CLASS_P (value))
19656 {
19657 constant_p = false;
19658 break;
19659 }
19660
19661 if (constant_p)
19662 {
19663 init = build_vector_from_ctor (type, elts);
19664 break;
19665 }
19666 }
19667 /* FALLTHRU */
19668
19669 default:
19670 return NULL;
19671 }
19672
19673 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19674
19675 /* If expand_expr returns a MEM, it wasn't immediate. */
19676 gcc_assert (!rtl || !MEM_P (rtl));
19677 }
19678
19679 return rtl;
19680 }
19681
19682 /* Generate RTL for the variable DECL to represent its location. */
19683
19684 static rtx
19685 rtl_for_decl_location (tree decl)
19686 {
19687 rtx rtl;
19688
19689 /* Here we have to decide where we are going to say the parameter "lives"
19690 (as far as the debugger is concerned). We only have a couple of
19691 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19692
19693 DECL_RTL normally indicates where the parameter lives during most of the
19694 activation of the function. If optimization is enabled however, this
19695 could be either NULL or else a pseudo-reg. Both of those cases indicate
19696 that the parameter doesn't really live anywhere (as far as the code
19697 generation parts of GCC are concerned) during most of the function's
19698 activation. That will happen (for example) if the parameter is never
19699 referenced within the function.
19700
19701 We could just generate a location descriptor here for all non-NULL
19702 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19703 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19704 where DECL_RTL is NULL or is a pseudo-reg.
19705
19706 Note however that we can only get away with using DECL_INCOMING_RTL as
19707 a backup substitute for DECL_RTL in certain limited cases. In cases
19708 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19709 we can be sure that the parameter was passed using the same type as it is
19710 declared to have within the function, and that its DECL_INCOMING_RTL
19711 points us to a place where a value of that type is passed.
19712
19713 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19714 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19715 because in these cases DECL_INCOMING_RTL points us to a value of some
19716 type which is *different* from the type of the parameter itself. Thus,
19717 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19718 such cases, the debugger would end up (for example) trying to fetch a
19719 `float' from a place which actually contains the first part of a
19720 `double'. That would lead to really incorrect and confusing
19721 output at debug-time.
19722
19723 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19724 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19725 are a couple of exceptions however. On little-endian machines we can
19726 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19727 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19728 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19729 when (on a little-endian machine) a non-prototyped function has a
19730 parameter declared to be of type `short' or `char'. In such cases,
19731 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19732 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19733 passed `int' value. If the debugger then uses that address to fetch
19734 a `short' or a `char' (on a little-endian machine) the result will be
19735 the correct data, so we allow for such exceptional cases below.
19736
19737 Note that our goal here is to describe the place where the given formal
19738 parameter lives during most of the function's activation (i.e. between the
19739 end of the prologue and the start of the epilogue). We'll do that as best
19740 as we can. Note however that if the given formal parameter is modified
19741 sometime during the execution of the function, then a stack backtrace (at
19742 debug-time) will show the function as having been called with the *new*
19743 value rather than the value which was originally passed in. This happens
19744 rarely enough that it is not a major problem, but it *is* a problem, and
19745 I'd like to fix it.
19746
19747 A future version of dwarf2out.c may generate two additional attributes for
19748 any given DW_TAG_formal_parameter DIE which will describe the "passed
19749 type" and the "passed location" for the given formal parameter in addition
19750 to the attributes we now generate to indicate the "declared type" and the
19751 "active location" for each parameter. This additional set of attributes
19752 could be used by debuggers for stack backtraces. Separately, note that
19753 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19754 This happens (for example) for inlined-instances of inline function formal
19755 parameters which are never referenced. This really shouldn't be
19756 happening. All PARM_DECL nodes should get valid non-NULL
19757 DECL_INCOMING_RTL values. FIXME. */
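  /* Illustration only (hypothetical example) of the little-endian exception
     described above: for an old-style, non-prototyped

	 int f (s) short s; { return s; }

     TREE_TYPE (s) is `short' while DECL_ARG_TYPE (s) is `int'.  On a
     little-endian machine the passed `int' slot begins with the bytes of the
     `short', so DECL_INCOMING_RTL still points at usable data.  */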
19758
19759 /* Use DECL_RTL as the "location" unless we find something better. */
19760 rtl = DECL_RTL_IF_SET (decl);
19761
19762 /* When generating abstract instances, ignore everything except
19763 constants, symbols living in memory, and symbols living in
19764 fixed registers. */
19765 if (! reload_completed)
19766 {
19767 if (rtl
19768 && (CONSTANT_P (rtl)
19769 || (MEM_P (rtl)
19770 && CONSTANT_P (XEXP (rtl, 0)))
19771 || (REG_P (rtl)
19772 && VAR_P (decl)
19773 && TREE_STATIC (decl))))
19774 {
19775 rtl = targetm.delegitimize_address (rtl);
19776 return rtl;
19777 }
19778 rtl = NULL_RTX;
19779 }
19780 else if (TREE_CODE (decl) == PARM_DECL)
19781 {
19782 if (rtl == NULL_RTX
19783 || is_pseudo_reg (rtl)
19784 || (MEM_P (rtl)
19785 && is_pseudo_reg (XEXP (rtl, 0))
19786 && DECL_INCOMING_RTL (decl)
19787 && MEM_P (DECL_INCOMING_RTL (decl))
19788 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19789 {
19790 tree declared_type = TREE_TYPE (decl);
19791 tree passed_type = DECL_ARG_TYPE (decl);
19792 machine_mode dmode = TYPE_MODE (declared_type);
19793 machine_mode pmode = TYPE_MODE (passed_type);
19794
19795 /* This decl represents a formal parameter which was optimized out.
19796 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19797 all cases where (rtl == NULL_RTX) just below. */
19798 if (dmode == pmode)
19799 rtl = DECL_INCOMING_RTL (decl);
19800 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19801 && SCALAR_INT_MODE_P (dmode)
19802 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19803 && DECL_INCOMING_RTL (decl))
19804 {
19805 rtx inc = DECL_INCOMING_RTL (decl);
19806 if (REG_P (inc))
19807 rtl = inc;
19808 else if (MEM_P (inc))
19809 {
19810 if (BYTES_BIG_ENDIAN)
19811 rtl = adjust_address_nv (inc, dmode,
19812 GET_MODE_SIZE (pmode)
19813 - GET_MODE_SIZE (dmode));
19814 else
19815 rtl = inc;
19816 }
19817 }
19818 }
19819
19820 /* If the parm was passed in registers, but lives on the stack, then
19821 make a big endian correction if the mode of the type of the
19822 parameter is not the same as the mode of the rtl. */
19823 /* ??? This is the same series of checks that are made in dbxout.c before
19824 we reach the big endian correction code there. It isn't clear if all
19825 of these checks are necessary here, but keeping them all is the safe
19826 thing to do. */
19827 else if (MEM_P (rtl)
19828 && XEXP (rtl, 0) != const0_rtx
19829 && ! CONSTANT_P (XEXP (rtl, 0))
19830 /* Not passed in memory. */
19831 && !MEM_P (DECL_INCOMING_RTL (decl))
19832 /* Not passed by invisible reference. */
19833 && (!REG_P (XEXP (rtl, 0))
19834 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19835 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19836 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19837 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19838 #endif
19839 )
19840 /* Big endian correction check. */
19841 && BYTES_BIG_ENDIAN
19842 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19843 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
19844 UNITS_PER_WORD))
19845 {
19846 machine_mode addr_mode = get_address_mode (rtl);
19847 poly_int64 offset = (UNITS_PER_WORD
19848 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
19849
19850 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19851 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19852 }
19853 }
19854 else if (VAR_P (decl)
19855 && rtl
19856 && MEM_P (rtl)
19857 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
19858 {
19859 machine_mode addr_mode = get_address_mode (rtl);
19860 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
19861 GET_MODE (rtl));
19862
19863 /* If a variable is declared "register" yet is smaller than
19864 a register, then if we store the variable to memory, it
19865 looks like we're storing a register-sized value, when in
19866 fact we are not. We need to adjust the offset of the
19867 storage location to reflect the actual value's bytes,
19868 else gdb will not be able to display it. */
19869 if (maybe_ne (offset, 0))
19870 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19871 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19872 }
19873
19874 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
19875 and will have been substituted directly into all expressions that use it.
19876 C does not have such a concept, but C++ and other languages do. */
19877 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
19878 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
19879
19880 if (rtl)
19881 rtl = targetm.delegitimize_address (rtl);
19882
19883 /* If we don't look past the constant pool, we risk emitting a
19884 reference to a constant pool entry that isn't referenced from
19885 code, and thus is not emitted. */
19886 if (rtl)
19887 rtl = avoid_constant_pool_reference (rtl);
19888
19889   /* Try harder to get an rtl.  If this symbol ends up not being emitted
19890 in the current CU, resolve_addr will remove the expression referencing
19891 it. */
19892 if (rtl == NULL_RTX
19893 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
19894 && VAR_P (decl)
19895 && !DECL_EXTERNAL (decl)
19896 && TREE_STATIC (decl)
19897 && DECL_NAME (decl)
19898 && !DECL_HARD_REGISTER (decl)
19899 && DECL_MODE (decl) != VOIDmode)
19900 {
19901 rtl = make_decl_rtl_for_debug (decl);
19902 if (!MEM_P (rtl)
19903 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
19904 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
19905 rtl = NULL_RTX;
19906 }
19907
19908 return rtl;
19909 }
19910
19911 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
19912 returned. If so, the decl for the COMMON block is returned, and the
19913 value is the offset into the common block for the symbol. */
19914
19915 static tree
19916 fortran_common (tree decl, HOST_WIDE_INT *value)
19917 {
19918 tree val_expr, cvar;
19919 machine_mode mode;
19920 poly_int64 bitsize, bitpos;
19921 tree offset;
19922 HOST_WIDE_INT cbitpos;
19923 int unsignedp, reversep, volatilep = 0;
19924
19925 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
19926 it does not have a value (the offset into the common area), or if it
19927 is thread local (as opposed to global) then it isn't common, and shouldn't
19928 be handled as such. */
19929 if (!VAR_P (decl)
19930 || !TREE_STATIC (decl)
19931 || !DECL_HAS_VALUE_EXPR_P (decl)
19932 || !is_fortran ())
19933 return NULL_TREE;
19934
19935 val_expr = DECL_VALUE_EXPR (decl);
19936 if (TREE_CODE (val_expr) != COMPONENT_REF)
19937 return NULL_TREE;
19938
19939 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
19940 &unsignedp, &reversep, &volatilep);
19941
19942 if (cvar == NULL_TREE
19943 || !VAR_P (cvar)
19944 || DECL_ARTIFICIAL (cvar)
19945 || !TREE_PUBLIC (cvar)
19946 /* We don't expect to have to cope with variable offsets,
19947 since at present all static data must have a constant size. */
19948 || !bitpos.is_constant (&cbitpos))
19949 return NULL_TREE;
19950
19951 *value = 0;
19952 if (offset != NULL)
19953 {
19954 if (!tree_fits_shwi_p (offset))
19955 return NULL_TREE;
19956 *value = tree_to_shwi (offset);
19957 }
19958 if (cbitpos != 0)
19959 *value += cbitpos / BITS_PER_UNIT;
19960
19961 return cvar;
19962 }
19963
19964 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
19965 data attribute for a variable or a parameter. We generate the
19966 DW_AT_const_value attribute only in those cases where the given variable
19967 or parameter does not have a true "location" either in memory or in a
19968 register. This can happen (for example) when a constant is passed as an
19969 actual argument in a call to an inline function. (It's possible that
19970 these things can crop up in other ways also.) Note that one type of
19971 constant value which can be passed into an inlined function is a constant
19972 pointer. This can happen for example if an actual argument in an inlined
19973 function call evaluates to a compile-time constant address.
19974
19975 CACHE_P is true if it is worth caching the location list for DECL,
19976 so that future calls can reuse it rather than regenerate it from scratch.
19977 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
19978 since we will need to refer to them each time the function is inlined. */
19979
19980 static bool
19981 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
19982 {
19983 rtx rtl;
19984 dw_loc_list_ref list;
19985 var_loc_list *loc_list;
19986 cached_dw_loc_list *cache;
19987
19988 if (early_dwarf)
19989 return false;
19990
19991 if (TREE_CODE (decl) == ERROR_MARK)
19992 return false;
19993
19994 if (get_AT (die, DW_AT_location)
19995 || get_AT (die, DW_AT_const_value))
19996 return true;
19997
19998 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
19999 || TREE_CODE (decl) == RESULT_DECL);
20000
20001 /* Try to get some constant RTL for this decl, and use that as the value of
20002 the location. */
20003
20004 rtl = rtl_for_decl_location (decl);
20005 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20006 && add_const_value_attribute (die, rtl))
20007 return true;
20008
20009   /* See if we have a single element location list that is equivalent to
20010      a constant value.  In that case it is better to use add_const_value_attribute
20011      rather than expanding the constant value equivalent.  */
20012 loc_list = lookup_decl_loc (decl);
20013 if (loc_list
20014 && loc_list->first
20015 && loc_list->first->next == NULL
20016 && NOTE_P (loc_list->first->loc)
20017 && NOTE_VAR_LOCATION (loc_list->first->loc)
20018 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20019 {
20020 struct var_loc_node *node;
20021
20022 node = loc_list->first;
20023 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20024 if (GET_CODE (rtl) == EXPR_LIST)
20025 rtl = XEXP (rtl, 0);
20026 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20027 && add_const_value_attribute (die, rtl))
20028 return true;
20029 }
20030 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20031 list several times. See if we've already cached the contents. */
20032 list = NULL;
20033 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20034 cache_p = false;
20035 if (cache_p)
20036 {
20037 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20038 if (cache)
20039 list = cache->loc_list;
20040 }
20041 if (list == NULL)
20042 {
20043 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20044 NULL);
20045 /* It is usually worth caching this result if the decl is from
20046 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20047 if (cache_p && list && list->dw_loc_next)
20048 {
20049 cached_dw_loc_list **slot
20050 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20051 DECL_UID (decl),
20052 INSERT);
20053 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20054 cache->decl_id = DECL_UID (decl);
20055 cache->loc_list = list;
20056 *slot = cache;
20057 }
20058 }
20059 if (list)
20060 {
20061 add_AT_location_description (die, DW_AT_location, list);
20062 return true;
20063 }
20064 /* None of that worked, so it must not really have a location;
20065 try adding a constant value attribute from the DECL_INITIAL. */
20066 return tree_add_const_value_attribute_for_decl (die, decl);
20067 }
20068
20069 /* Helper function for tree_add_const_value_attribute. Natively encode
20070 initializer INIT into an array. Return true if successful. */
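/* Illustration only (hypothetical example): for

     static const char s[4] = "abc";

   the STRING_CST case below copies the three characters plus the implicit
   terminating zero into ARRAY, so the caller can emit them as a
   DW_AT_const_value data block.  */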
20071
20072 static bool
20073 native_encode_initializer (tree init, unsigned char *array, int size)
20074 {
20075 tree type;
20076
20077 if (init == NULL_TREE)
20078 return false;
20079
20080 STRIP_NOPS (init);
20081 switch (TREE_CODE (init))
20082 {
20083 case STRING_CST:
20084 type = TREE_TYPE (init);
20085 if (TREE_CODE (type) == ARRAY_TYPE)
20086 {
20087 tree enttype = TREE_TYPE (type);
20088 scalar_int_mode mode;
20089
20090 if (!is_int_mode (TYPE_MODE (enttype), &mode)
20091 || GET_MODE_SIZE (mode) != 1)
20092 return false;
20093 if (int_size_in_bytes (type) != size)
20094 return false;
20095 if (size > TREE_STRING_LENGTH (init))
20096 {
20097 memcpy (array, TREE_STRING_POINTER (init),
20098 TREE_STRING_LENGTH (init));
20099 memset (array + TREE_STRING_LENGTH (init),
20100 '\0', size - TREE_STRING_LENGTH (init));
20101 }
20102 else
20103 memcpy (array, TREE_STRING_POINTER (init), size);
20104 return true;
20105 }
20106 return false;
20107 case CONSTRUCTOR:
20108 type = TREE_TYPE (init);
20109 if (int_size_in_bytes (type) != size)
20110 return false;
20111 if (TREE_CODE (type) == ARRAY_TYPE)
20112 {
20113 HOST_WIDE_INT min_index;
20114 unsigned HOST_WIDE_INT cnt;
20115 int curpos = 0, fieldsize;
20116 constructor_elt *ce;
20117
20118 if (TYPE_DOMAIN (type) == NULL_TREE
20119 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
20120 return false;
20121
20122 fieldsize = int_size_in_bytes (TREE_TYPE (type));
20123 if (fieldsize <= 0)
20124 return false;
20125
20126 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
20127 memset (array, '\0', size);
20128 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20129 {
20130 tree val = ce->value;
20131 tree index = ce->index;
20132 int pos = curpos;
20133 if (index && TREE_CODE (index) == RANGE_EXPR)
20134 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
20135 * fieldsize;
20136 else if (index)
20137 pos = (tree_to_shwi (index) - min_index) * fieldsize;
20138
20139 if (val)
20140 {
20141 STRIP_NOPS (val);
20142 if (!native_encode_initializer (val, array + pos, fieldsize))
20143 return false;
20144 }
20145 curpos = pos + fieldsize;
20146 if (index && TREE_CODE (index) == RANGE_EXPR)
20147 {
20148 int count = tree_to_shwi (TREE_OPERAND (index, 1))
20149 - tree_to_shwi (TREE_OPERAND (index, 0));
20150 while (count-- > 0)
20151 {
20152 if (val)
20153 memcpy (array + curpos, array + pos, fieldsize);
20154 curpos += fieldsize;
20155 }
20156 }
20157 gcc_assert (curpos <= size);
20158 }
20159 return true;
20160 }
20161 else if (TREE_CODE (type) == RECORD_TYPE
20162 || TREE_CODE (type) == UNION_TYPE)
20163 {
20164 tree field = NULL_TREE;
20165 unsigned HOST_WIDE_INT cnt;
20166 constructor_elt *ce;
20167
20168 if (int_size_in_bytes (type) != size)
20169 return false;
20170
20171 if (TREE_CODE (type) == RECORD_TYPE)
20172 field = TYPE_FIELDS (type);
20173
20174 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20175 {
20176 tree val = ce->value;
20177 int pos, fieldsize;
20178
20179 if (ce->index != 0)
20180 field = ce->index;
20181
20182 if (val)
20183 STRIP_NOPS (val);
20184
20185 if (field == NULL_TREE || DECL_BIT_FIELD (field))
20186 return false;
20187
20188 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
20189 && TYPE_DOMAIN (TREE_TYPE (field))
20190 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
20191 return false;
20192 else if (DECL_SIZE_UNIT (field) == NULL_TREE
20193 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
20194 return false;
20195 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
20196 pos = int_byte_position (field);
20197 gcc_assert (pos + fieldsize <= size);
20198 if (val && fieldsize != 0
20199 && !native_encode_initializer (val, array + pos, fieldsize))
20200 return false;
20201 }
20202 return true;
20203 }
20204 return false;
20205 case VIEW_CONVERT_EXPR:
20206 case NON_LVALUE_EXPR:
20207 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
20208 default:
20209 return native_encode_expr (init, array, size) == size;
20210 }
20211 }
20212
20213 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20214 attribute is the const value T. */
20215
20216 static bool
20217 tree_add_const_value_attribute (dw_die_ref die, tree t)
20218 {
20219 tree init;
20220 tree type = TREE_TYPE (t);
20221 rtx rtl;
20222
20223 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20224 return false;
20225
20226 init = t;
20227 gcc_assert (!DECL_P (init));
20228
20229 if (TREE_CODE (init) == INTEGER_CST)
20230 {
20231 if (tree_fits_uhwi_p (init))
20232 {
20233 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20234 return true;
20235 }
20236 if (tree_fits_shwi_p (init))
20237 {
20238 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20239 return true;
20240 }
20241 }
20242 if (! early_dwarf)
20243 {
20244 rtl = rtl_for_decl_init (init, type);
20245 if (rtl)
20246 return add_const_value_attribute (die, rtl);
20247 }
20248 /* If the host and target are sane, try harder. */
20249 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20250 && initializer_constant_valid_p (init, type))
20251 {
20252 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20253 if (size > 0 && (int) size == size)
20254 {
20255 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20256
20257 if (native_encode_initializer (init, array, size))
20258 {
20259 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20260 return true;
20261 }
20262 ggc_free (array);
20263 }
20264 }
20265 return false;
20266 }
20267
20268 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20269 attribute is the const value of T, where T is an integral constant
20270 variable with static storage duration
20271 (so it can't be a PARM_DECL or a RESULT_DECL). */
20272
20273 static bool
20274 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20275 {
20276
20277 if (!decl
20278 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20279 || (VAR_P (decl) && !TREE_STATIC (decl)))
20280 return false;
20281
20282 if (TREE_READONLY (decl)
20283 && ! TREE_THIS_VOLATILE (decl)
20284 && DECL_INITIAL (decl))
20285 /* OK */;
20286 else
20287 return false;
20288
20289 /* Don't add DW_AT_const_value if abstract origin already has one. */
20290 if (get_AT (var_die, DW_AT_const_value))
20291 return false;
20292
20293 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20294 }
20295
20296 /* Convert the CFI instructions for the current function into a
20297    location list.  This is used for DW_AT_frame_base when we are targeting
20298 a dwarf2 consumer that does not support the dwarf3
20299 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20300 expressions. */
20301
20302 static dw_loc_list_ref
20303 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20304 {
20305 int ix;
20306 dw_fde_ref fde;
20307 dw_loc_list_ref list, *list_tail;
20308 dw_cfi_ref cfi;
20309 dw_cfa_location last_cfa, next_cfa;
20310 const char *start_label, *last_label, *section;
20311 dw_cfa_location remember;
20312
20313 fde = cfun->fde;
20314 gcc_assert (fde != NULL);
20315
20316 section = secname_for_decl (current_function_decl);
20317 list_tail = &list;
20318 list = NULL;
20319
20320 memset (&next_cfa, 0, sizeof (next_cfa));
20321 next_cfa.reg = INVALID_REGNUM;
20322 remember = next_cfa;
20323
20324 start_label = fde->dw_fde_begin;
20325
20326 /* ??? Bald assumption that the CIE opcode list does not contain
20327 advance opcodes. */
20328 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20329 lookup_cfa_1 (cfi, &next_cfa, &remember);
20330
20331 last_cfa = next_cfa;
20332 last_label = start_label;
20333
20334 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20335 {
20336 /* If the first partition contained no CFI adjustments, the
20337 CIE opcodes apply to the whole first partition. */
20338 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20339 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20340       list_tail = &(*list_tail)->dw_loc_next;
20341 start_label = last_label = fde->dw_fde_second_begin;
20342 }
20343
20344 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20345 {
20346 switch (cfi->dw_cfi_opc)
20347 {
20348 case DW_CFA_set_loc:
20349 case DW_CFA_advance_loc1:
20350 case DW_CFA_advance_loc2:
20351 case DW_CFA_advance_loc4:
20352 if (!cfa_equal_p (&last_cfa, &next_cfa))
20353 {
20354 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20355 start_label, 0, last_label, 0, section);
20356
20357 list_tail = &(*list_tail)->dw_loc_next;
20358 last_cfa = next_cfa;
20359 start_label = last_label;
20360 }
20361 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20362 break;
20363
20364 case DW_CFA_advance_loc:
20365 /* The encoding is complex enough that we should never emit this. */
20366 gcc_unreachable ();
20367
20368 default:
20369 lookup_cfa_1 (cfi, &next_cfa, &remember);
20370 break;
20371 }
20372 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20373 {
20374 if (!cfa_equal_p (&last_cfa, &next_cfa))
20375 {
20376 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20377 start_label, 0, last_label, 0, section);
20378
20379 list_tail = &(*list_tail)->dw_loc_next;
20380 last_cfa = next_cfa;
20381 start_label = last_label;
20382 }
20383 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20384 start_label, 0, fde->dw_fde_end, 0, section);
20385 list_tail = &(*list_tail)->dw_loc_next;
20386 start_label = last_label = fde->dw_fde_second_begin;
20387 }
20388 }
20389
20390 if (!cfa_equal_p (&last_cfa, &next_cfa))
20391 {
20392 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20393 start_label, 0, last_label, 0, section);
20394 list_tail = &(*list_tail)->dw_loc_next;
20395 start_label = last_label;
20396 }
20397
20398 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20399 start_label, 0,
20400 fde->dw_fde_second_begin
20401 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20402 section);
20403
20404 maybe_gen_llsym (list);
20405
20406 return list;
20407 }
20408
20409 /* Compute a displacement from the "steady-state frame pointer" to the
20410 frame base (often the same as the CFA), and store it in
20411 frame_pointer_fb_offset. OFFSET is added to the displacement
20412 before the latter is negated. */
20413
20414 static void
20415 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20416 {
20417 rtx reg, elim;
20418
20419 #ifdef FRAME_POINTER_CFA_OFFSET
20420 reg = frame_pointer_rtx;
20421 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20422 #else
20423 reg = arg_pointer_rtx;
20424 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20425 #endif
20426
20427 elim = (ira_use_lra_p
20428 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20429 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20430 elim = strip_offset_and_add (elim, &offset);
20431
20432 frame_pointer_fb_offset = -offset;
20433
20434 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20435      in which to eliminate.  This is because its stack pointer isn't
20436 directly accessible as a register within the ISA. To work around
20437 this, assume that while we cannot provide a proper value for
20438 frame_pointer_fb_offset, we won't need one either. */
20439 frame_pointer_fb_offset_valid
20440 = ((SUPPORTS_STACK_ALIGNMENT
20441 && (elim == hard_frame_pointer_rtx
20442 || elim == stack_pointer_rtx))
20443 || elim == (frame_pointer_needed
20444 ? hard_frame_pointer_rtx
20445 : stack_pointer_rtx));
20446 }
20447
20448 /* Generate a DW_AT_name attribute given some string value to be included as
20449 the value of the attribute. */
20450
20451 static void
20452 add_name_attribute (dw_die_ref die, const char *name_string)
20453 {
20454 if (name_string != NULL && *name_string != 0)
20455 {
20456 if (demangle_name_func)
20457 name_string = (*demangle_name_func) (name_string);
20458
20459 add_AT_string (die, DW_AT_name, name_string);
20460 }
20461 }
20462
20463 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
20464 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20465 of TYPE accordingly.
20466
20467 ??? This is a temporary measure until after we're able to generate
20468 regular DWARF for the complex Ada type system. */
20469
20470 static void
20471 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20472 dw_die_ref context_die)
20473 {
20474 tree dtype;
20475 dw_die_ref dtype_die;
20476
20477 if (!lang_hooks.types.descriptive_type)
20478 return;
20479
20480 dtype = lang_hooks.types.descriptive_type (type);
20481 if (!dtype)
20482 return;
20483
20484 dtype_die = lookup_type_die (dtype);
20485 if (!dtype_die)
20486 {
20487 gen_type_die (dtype, context_die);
20488 dtype_die = lookup_type_die (dtype);
20489 gcc_assert (dtype_die);
20490 }
20491
20492 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20493 }
20494
20495 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20496
20497 static const char *
20498 comp_dir_string (void)
20499 {
20500 const char *wd;
20501 char *wd1;
20502 static const char *cached_wd = NULL;
20503
20504 if (cached_wd != NULL)
20505 return cached_wd;
20506
20507 wd = get_src_pwd ();
20508 if (wd == NULL)
20509 return NULL;
20510
20511 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20512 {
20513 int wdlen;
20514
20515 wdlen = strlen (wd);
20516 wd1 = ggc_vec_alloc<char> (wdlen + 2);
20517 strcpy (wd1, wd);
20518 wd1 [wdlen] = DIR_SEPARATOR;
20519 wd1 [wdlen + 1] = 0;
20520 wd = wd1;
20521 }
20522
20523 cached_wd = remap_debug_filename (wd);
20524 return cached_wd;
20525 }
20526
20527 /* Generate a DW_AT_comp_dir attribute for DIE. */
20528
20529 static void
20530 add_comp_dir_attribute (dw_die_ref die)
20531 {
20532 const char * wd = comp_dir_string ();
20533 if (wd != NULL)
20534 add_AT_string (die, DW_AT_comp_dir, wd);
20535 }
20536
20537 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20538 pointer computation, ...), output a representation for that bound according
20539 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20540 loc_list_from_tree for the meaning of CONTEXT. */
20541
20542 static void
20543 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20544 int forms, struct loc_descr_context *context)
20545 {
20546 dw_die_ref context_die, decl_die;
20547 dw_loc_list_ref list;
20548 bool strip_conversions = true;
20549 bool placeholder_seen = false;
20550
20551 while (strip_conversions)
20552 switch (TREE_CODE (value))
20553 {
20554 case ERROR_MARK:
20555 case SAVE_EXPR:
20556 return;
20557
20558 CASE_CONVERT:
20559 case VIEW_CONVERT_EXPR:
20560 value = TREE_OPERAND (value, 0);
20561 break;
20562
20563 default:
20564 strip_conversions = false;
20565 break;
20566 }
20567
20568 /* If possible and permitted, output the attribute as a constant. */
20569 if ((forms & dw_scalar_form_constant) != 0
20570 && TREE_CODE (value) == INTEGER_CST)
20571 {
20572 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20573
20574 /* If HOST_WIDE_INT is big enough then represent the bound as
20575 a constant value. We need to choose a form based on
20576 whether the type is signed or unsigned. We cannot just
20577 call add_AT_unsigned if the value itself is positive
20578 (add_AT_unsigned might add the unsigned value encoded as
20579 DW_FORM_data[1248]). Some DWARF consumers will lookup the
20580 bounds type and then sign extend any unsigned values found
20581 for signed types. This is needed only for
20582 DW_AT_{lower,upper}_bound, since for most other attributes,
20583 consumers will treat DW_FORM_data[1248] as unsigned values,
20584 regardless of the underlying type. */
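      /* For instance (illustration only): an upper bound of 200 for a signed
	 8-bit bounds type, emitted with add_AT_unsigned as DW_FORM_data1
	 0xc8, could be re-read as -56 by such a consumer; using add_AT_int
	 for signed types avoids that.  */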
20585 if (prec <= HOST_BITS_PER_WIDE_INT
20586 || tree_fits_uhwi_p (value))
20587 {
20588 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20589 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20590 else
20591 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20592 }
20593 else
20594 /* Otherwise represent the bound as an unsigned value with
20595 the precision of its type. The precision and signedness
20596 of the type will be necessary to re-interpret it
20597 unambiguously. */
20598 add_AT_wide (die, attr, wi::to_wide (value));
20599 return;
20600 }
20601
20602 /* Otherwise, if it's possible and permitted too, output a reference to
20603 another DIE. */
20604 if ((forms & dw_scalar_form_reference) != 0)
20605 {
20606 tree decl = NULL_TREE;
20607
20608 /* Some type attributes reference an outer type. For instance, the upper
20609 bound of an array may reference an embedding record (this happens in
20610 Ada). */
20611 if (TREE_CODE (value) == COMPONENT_REF
20612 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20613 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20614 decl = TREE_OPERAND (value, 1);
20615
20616 else if (VAR_P (value)
20617 || TREE_CODE (value) == PARM_DECL
20618 || TREE_CODE (value) == RESULT_DECL)
20619 decl = value;
20620
20621 if (decl != NULL_TREE)
20622 {
20623 dw_die_ref decl_die = lookup_decl_die (decl);
20624
20625 /* ??? Can this happen, or should the variable have been bound
20626 first? Probably it can, since I imagine that we try to create
20627 the types of parameters in the order in which they exist in
20628 the list, and won't have created a forward reference to a
20629 later parameter. */
20630 if (decl_die != NULL)
20631 {
20632 add_AT_die_ref (die, attr, decl_die);
20633 return;
20634 }
20635 }
20636 }
20637
20638 /* Last chance: try to create a stack operation procedure to evaluate the
20639 value. Do nothing if even that is not possible or permitted. */
20640 if ((forms & dw_scalar_form_exprloc) == 0)
20641 return;
20642
20643 list = loc_list_from_tree (value, 2, context);
20644 if (context && context->placeholder_arg)
20645 {
20646 placeholder_seen = context->placeholder_seen;
20647 context->placeholder_seen = false;
20648 }
20649 if (list == NULL || single_element_loc_list_p (list))
20650 {
20651       /* If this attribute is neither a reference nor a constant, it is
20652	  a DWARF expression rather than a location description.  For that
20653 loc_list_from_tree (value, 0, &context) is needed. */
20654 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20655 if (list2 && single_element_loc_list_p (list2))
20656 {
20657 if (placeholder_seen)
20658 {
20659 struct dwarf_procedure_info dpi;
20660 dpi.fndecl = NULL_TREE;
20661 dpi.args_count = 1;
20662 if (!resolve_args_picking (list2->expr, 1, &dpi))
20663 return;
20664 }
20665 add_AT_loc (die, attr, list2->expr);
20666 return;
20667 }
20668 }
20669
20670 /* If that failed to give a single element location list, fall back to
20671 outputting this as a reference... still if permitted. */
20672 if (list == NULL
20673 || (forms & dw_scalar_form_reference) == 0
20674 || placeholder_seen)
20675 return;
20676
20677 if (current_function_decl == 0)
20678 context_die = comp_unit_die ();
20679 else
20680 context_die = lookup_decl_die (current_function_decl);
20681
20682 decl_die = new_die (DW_TAG_variable, context_die, value);
20683 add_AT_flag (decl_die, DW_AT_artificial, 1);
20684 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20685 context_die);
20686 add_AT_location_description (decl_die, DW_AT_location, list);
20687 add_AT_die_ref (die, attr, decl_die);
20688 }
20689
20690 /* Return the default for DW_AT_lower_bound, or -1 if there is no
20691    default.  */
20692
20693 static int
20694 lower_bound_default (void)
20695 {
20696 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20697 {
20698 case DW_LANG_C:
20699 case DW_LANG_C89:
20700 case DW_LANG_C99:
20701 case DW_LANG_C11:
20702 case DW_LANG_C_plus_plus:
20703 case DW_LANG_C_plus_plus_11:
20704 case DW_LANG_C_plus_plus_14:
20705 case DW_LANG_ObjC:
20706 case DW_LANG_ObjC_plus_plus:
20707 return 0;
20708 case DW_LANG_Fortran77:
20709 case DW_LANG_Fortran90:
20710 case DW_LANG_Fortran95:
20711 case DW_LANG_Fortran03:
20712 case DW_LANG_Fortran08:
20713 return 1;
20714 case DW_LANG_UPC:
20715 case DW_LANG_D:
20716 case DW_LANG_Python:
20717 return dwarf_version >= 4 ? 0 : -1;
20718 case DW_LANG_Ada95:
20719 case DW_LANG_Ada83:
20720 case DW_LANG_Cobol74:
20721 case DW_LANG_Cobol85:
20722 case DW_LANG_Modula2:
20723 case DW_LANG_PLI:
20724 return dwarf_version >= 4 ? 1 : -1;
20725 default:
20726 return -1;
20727 }
20728 }
20729
20730 /* Given a tree node describing an array bound (either lower or upper) output
20731 a representation for that bound. */
20732
20733 static void
20734 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20735 tree bound, struct loc_descr_context *context)
20736 {
20737 int dflt;
20738
20739 while (1)
20740 switch (TREE_CODE (bound))
20741 {
20742 /* Strip all conversions. */
20743 CASE_CONVERT:
20744 case VIEW_CONVERT_EXPR:
20745 bound = TREE_OPERAND (bound, 0);
20746 break;
20747
20748 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20749 are even omitted when they are the default. */
20750 case INTEGER_CST:
20751 /* If the value for this bound is the default one, we can even omit the
20752 attribute. */
20753 if (bound_attr == DW_AT_lower_bound
20754 && tree_fits_shwi_p (bound)
20755 && (dflt = lower_bound_default ()) != -1
20756 && tree_to_shwi (bound) == dflt)
20757 return;
20758
20759 /* FALLTHRU */
20760
20761 default:
20762       /* Because of the complex interactions there can be with other GNAT
20763	 encodings, GDB isn't ready yet to handle a proper DWARF description
20764	 for self-referential subrange bounds: let GNAT encodings do the
20765	 magic in such a case.  */
20766 if (is_ada ()
20767 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20768 && contains_placeholder_p (bound))
20769 return;
20770
20771 add_scalar_info (subrange_die, bound_attr, bound,
20772 dw_scalar_form_constant
20773 | dw_scalar_form_exprloc
20774 | dw_scalar_form_reference,
20775 context);
20776 return;
20777 }
20778 }
20779
20780 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20781 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20782 Note that the block of subscript information for an array type also
20783 includes information about the element type of the given array type.
20784
20785 This function reuses previously set type and bound information if
20786 available. */
20787
20788 static void
20789 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20790 {
20791 unsigned dimension_number;
20792 tree lower, upper;
20793 dw_die_ref child = type_die->die_child;
20794
20795 for (dimension_number = 0;
20796 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20797 type = TREE_TYPE (type), dimension_number++)
20798 {
20799 tree domain = TYPE_DOMAIN (type);
20800
20801 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20802 break;
20803
20804 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20805 and (in GNU C only) variable bounds. Handle all three forms
20806 here. */
20807
20808 /* Find and reuse a previously generated DW_TAG_subrange_type if
20809 available.
20810
20811 For multi-dimensional arrays, as we iterate through the
20812 various dimensions in the enclosing for loop above, we also
20813 iterate through the DIE children and pick at each
20814 DW_TAG_subrange_type previously generated (if available).
20815 Each child DW_TAG_subrange_type DIE describes the range of
20816 the current dimension. At this point we should have as many
20817 DW_TAG_subrange_type's as we have dimensions in the
20818 array. */
20819 dw_die_ref subrange_die = NULL;
20820 if (child)
20821 while (1)
20822 {
20823 child = child->die_sib;
20824 if (child->die_tag == DW_TAG_subrange_type)
20825 subrange_die = child;
20826 if (child == type_die->die_child)
20827 {
20828 /* If we wrapped around, stop looking next time. */
20829 child = NULL;
20830 break;
20831 }
20832 if (child->die_tag == DW_TAG_subrange_type)
20833 break;
20834 }
20835 if (!subrange_die)
20836 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20837
20838 if (domain)
20839 {
20840 /* We have an array type with specified bounds. */
20841 lower = TYPE_MIN_VALUE (domain);
20842 upper = TYPE_MAX_VALUE (domain);
20843
20844 /* Define the index type. */
20845 if (TREE_TYPE (domain)
20846 && !get_AT (subrange_die, DW_AT_type))
20847 {
20848 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20849 TREE_TYPE field. We can't emit debug info for this
20850 because it is an unnamed integral type. */
20851 if (TREE_CODE (domain) == INTEGER_TYPE
20852 && TYPE_NAME (domain) == NULL_TREE
20853 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20854 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20855 ;
20856 else
20857 add_type_attribute (subrange_die, TREE_TYPE (domain),
20858 TYPE_UNQUALIFIED, false, type_die);
20859 }
20860
20861 /* ??? If upper is NULL, the array has unspecified length,
20862 but it does have a lower bound. This happens with Fortran
20863 dimension arr(N:*)
20864 Since the debugger is definitely going to need to know N
20865 to produce useful results, go ahead and output the lower
20866 bound solo, and hope the debugger can cope. */
20867
20868 if (!get_AT (subrange_die, DW_AT_lower_bound))
20869 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20870 if (upper && !get_AT (subrange_die, DW_AT_upper_bound))
20871 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20872 }
20873
20874 /* Otherwise we have an array type with an unspecified length. The
20875 DWARF-2 spec does not say how to handle this; let's just leave out the
20876 bounds. */
20877 }
20878 }
20879
20880 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
20881
20882 static void
20883 add_byte_size_attribute (dw_die_ref die, tree tree_node)
20884 {
20885 dw_die_ref decl_die;
20886 HOST_WIDE_INT size;
20887 dw_loc_descr_ref size_expr = NULL;
20888
20889 switch (TREE_CODE (tree_node))
20890 {
20891 case ERROR_MARK:
20892 size = 0;
20893 break;
20894 case ENUMERAL_TYPE:
20895 case RECORD_TYPE:
20896 case UNION_TYPE:
20897 case QUAL_UNION_TYPE:
20898 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
20899 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
20900 {
20901 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
20902 return;
20903 }
20904 size_expr = type_byte_size (tree_node, &size);
20905 break;
20906 case FIELD_DECL:
20907 /* For a data member of a struct or union, the DW_AT_byte_size is
20908 generally given as the number of bytes normally allocated for an
20909 object of the *declared* type of the member itself. This is true
20910 even for bit-fields. */
20911 size = int_size_in_bytes (field_type (tree_node));
20912 break;
20913 default:
20914 gcc_unreachable ();
20915 }
20916
20917 /* Support for dynamically-sized objects was introduced by DWARFv3.
20918 At the moment, GDB does not handle variable byte sizes very well,
20919 though. */
20920 if ((dwarf_version >= 3 || !dwarf_strict)
20921 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
20922 && size_expr != NULL)
20923 add_AT_loc (die, DW_AT_byte_size, size_expr);
20924
20925 /* Note that `size' might be -1 when we get to this point. If it is, that
20926 indicates that the byte size of the entity in question is variable and
20927 that we could not generate a DWARF expression that computes it. */
20928 if (size >= 0)
20929 add_AT_unsigned (die, DW_AT_byte_size, size);
20930 }
20931
20932 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
20933 alignment. */
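/* Illustrative sketch (assuming a user-specified alignment): a declaration
   such as

       _Alignas (16) int buf[4];

   would get DW_AT_alignment 16 on its DIE, whereas a declaration that only
   uses the type's natural alignment gets no DW_AT_alignment attribute at
   all, since DECL_USER_ALIGN / TYPE_USER_ALIGN is not set.  */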
20934
20935 static void
20936 add_alignment_attribute (dw_die_ref die, tree tree_node)
20937 {
20938 if (dwarf_version < 5 && dwarf_strict)
20939 return;
20940
20941 unsigned align;
20942
20943 if (DECL_P (tree_node))
20944 {
20945 if (!DECL_USER_ALIGN (tree_node))
20946 return;
20947
20948 align = DECL_ALIGN_UNIT (tree_node);
20949 }
20950 else if (TYPE_P (tree_node))
20951 {
20952 if (!TYPE_USER_ALIGN (tree_node))
20953 return;
20954
20955 align = TYPE_ALIGN_UNIT (tree_node);
20956 }
20957 else
20958 gcc_unreachable ();
20959
20960 add_AT_unsigned (die, DW_AT_alignment, align);
20961 }
20962
20963 /* For a FIELD_DECL node which represents a bit-field, output an attribute
20964 which specifies the distance in bits from the highest order bit of the
20965 "containing object" for the bit-field to the highest order bit of the
20966 bit-field itself.
20967
20968 For any given bit-field, the "containing object" is a hypothetical object
20969 (of some integral or enum type) within which the given bit-field lives. The
20970 type of this hypothetical "containing object" is always the same as the
20971 declared type of the individual bit-field itself. The determination of the
20972 exact location of the "containing object" for a bit-field is rather
20973 complicated. It's handled by the `field_byte_offset' function (above).
20974
20975 CTX is required: see the comment for VLR_CONTEXT.
20976
20977 Note that it is the size (in bytes) of the hypothetical "containing object"
20978 which will be given in the DW_AT_byte_size attribute for this bit-field.
20979 (See `add_byte_size_attribute' above). */
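/* Illustrative sketch (values are approximate and depend on the target
   ABI's bit-field layout rules): on a little-endian target, for

       struct s { unsigned int f : 3; };

   assuming the containing object is a 32-bit int starting at byte 0, the
   highest-order object bit offset becomes 0 + 32 = 32 and the
   highest-order field bit offset becomes 0 + 3 = 3, giving
   DW_AT_bit_offset = 32 - 3 = 29.  On a big-endian target the same field
   would get DW_AT_bit_offset = 0.  */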
20980
20981 static inline void
20982 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
20983 {
20984 HOST_WIDE_INT object_offset_in_bytes;
20985 tree original_type = DECL_BIT_FIELD_TYPE (decl);
20986 HOST_WIDE_INT bitpos_int;
20987 HOST_WIDE_INT highest_order_object_bit_offset;
20988 HOST_WIDE_INT highest_order_field_bit_offset;
20989 HOST_WIDE_INT bit_offset;
20990
20991 field_byte_offset (decl, ctx, &object_offset_in_bytes);
20992
20993 /* Must be a field and a bit field. */
20994 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
20995
20996 /* We can't yet handle bit-fields whose offsets are variable, so if we
20997 encounter such things, just return without generating any attribute
20998 whatsoever. Likewise for variable or too large size. */
20999 if (! tree_fits_shwi_p (bit_position (decl))
21000 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21001 return;
21002
21003 bitpos_int = int_bit_position (decl);
21004
21005 /* Note that the bit offset is always the distance (in bits) from the
21006 highest-order bit of the "containing object" to the highest-order bit of
21007 the bit-field itself. Since the "high-order end" of any object or field
21008 is different on big-endian and little-endian machines, the computation
21009 below must take account of these differences. */
21010 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21011 highest_order_field_bit_offset = bitpos_int;
21012
21013 if (! BYTES_BIG_ENDIAN)
21014 {
21015 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21016 highest_order_object_bit_offset +=
21017 simple_type_size_in_bits (original_type);
21018 }
21019
21020 bit_offset
21021 = (! BYTES_BIG_ENDIAN
21022 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21023 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21024
21025 if (bit_offset < 0)
21026 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21027 else
21028 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21029 }
21030
21031 /* For a FIELD_DECL node which represents a bit field, output an attribute
21032 which specifies the length in bits of the given field. */
21033
21034 static inline void
21035 add_bit_size_attribute (dw_die_ref die, tree decl)
21036 {
21037 /* Must be a field and a bit field. */
21038 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21039 && DECL_BIT_FIELD_TYPE (decl));
21040
21041 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21042 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21043 }
21044
21045 /* If the language being compiled is ANSI C, add a 'prototyped'
21046 attribute if argument types are given for the function's parameters. */
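/* Illustrative sketch (assuming the compilation unit's DW_AT_language is
   one of the C dialects handled below): in C,

       int f (void);   /­* prototyped: DW_AT_prototyped is emitted *­/
       int g ();       /­* unprototyped empty parameter list: no attribute *­/

   since prototype_p is false for the second declaration.  */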
21047
21048 static inline void
21049 add_prototyped_attribute (dw_die_ref die, tree func_type)
21050 {
21051 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21052 {
21053 case DW_LANG_C:
21054 case DW_LANG_C89:
21055 case DW_LANG_C99:
21056 case DW_LANG_C11:
21057 case DW_LANG_ObjC:
21058 if (prototype_p (func_type))
21059 add_AT_flag (die, DW_AT_prototyped, 1);
21060 break;
21061 default:
21062 break;
21063 }
21064 }
21065
21066 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21067 by looking in the type declaration, the object declaration equate table or
21068 the block mapping. */
21069
21070 static inline dw_die_ref
21071 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21072 {
21073 dw_die_ref origin_die = NULL;
21074
21075 if (DECL_P (origin))
21076 {
21077 dw_die_ref c;
21078 origin_die = lookup_decl_die (origin);
21079 /* "Unwrap" the decls DIE which we put in the imported unit context.
21080 We are looking for the abstract copy here. */
21081 if (in_lto_p
21082 && origin_die
21083 && (c = get_AT_ref (origin_die, DW_AT_abstract_origin))
21084 /* ??? Identify this better. */
21085 && c->with_offset)
21086 origin_die = c;
21087 }
21088 else if (TYPE_P (origin))
21089 origin_die = lookup_type_die (origin);
21090 else if (TREE_CODE (origin) == BLOCK)
21091 origin_die = BLOCK_DIE (origin);
21092
21093 /* XXX: Functions that are never lowered don't always have correct block
21094 trees (in the case of Java they simply have no block tree; the same holds
21095 in some other languages). For these functions, there is nothing we can really do to
21096 output correct debug info for inlined functions in all cases. Rather
21097 than die, we'll just produce deficient debug info now, in that we will
21098 have variables without a proper abstract origin. In the future, when all
21099 functions are lowered, we should re-add a gcc_assert (origin_die)
21100 here. */
21101
21102 if (origin_die)
21103 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21104 return origin_die;
21105 }
21106
21107 /* We do not currently support the pure_virtual attribute. */
21108
21109 static inline void
21110 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21111 {
21112 if (DECL_VINDEX (func_decl))
21113 {
21114 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21115
21116 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21117 add_AT_loc (die, DW_AT_vtable_elem_location,
21118 new_loc_descr (DW_OP_constu,
21119 tree_to_shwi (DECL_VINDEX (func_decl)),
21120 0));
21121
21122 /* GNU extension: Record what type this method came from originally. */
21123 if (debug_info_level > DINFO_LEVEL_TERSE
21124 && DECL_CONTEXT (func_decl))
21125 add_AT_die_ref (die, DW_AT_containing_type,
21126 lookup_type_die (DECL_CONTEXT (func_decl)));
21127 }
21128 }
21129 \f
21130 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21131 given decl. This was a vendor extension until DWARF 4
21132 standardized it. */
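/* Illustrative sketch (mangling shown is an assumption based on the
   Itanium C++ ABI): for a C++ function such as

       namespace n { void f (int); }

   DW_AT_name would be "f" while the attribute added here carries the
   mangled assembler name, e.g. "_ZN1n1fEi".  For DWARF versions before 4
   the vendor attribute DW_AT_MIPS_linkage_name is used instead.  */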
21133
21134 static void
21135 add_linkage_attr (dw_die_ref die, tree decl)
21136 {
21137 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21138
21139 /* Mimic what assemble_name_raw does with a leading '*'. */
21140 if (name[0] == '*')
21141 name = &name[1];
21142
21143 if (dwarf_version >= 4)
21144 add_AT_string (die, DW_AT_linkage_name, name);
21145 else
21146 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21147 }
21148
21149 /* Add source coordinate attributes for the given decl. */
21150
21151 static void
21152 add_src_coords_attributes (dw_die_ref die, tree decl)
21153 {
21154 expanded_location s;
21155
21156 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21157 return;
21158 s = expand_location (DECL_SOURCE_LOCATION (decl));
21159 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21160 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21161 if (debug_column_info && s.column)
21162 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21163 }
21164
21165 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21166
21167 static void
21168 add_linkage_name_raw (dw_die_ref die, tree decl)
21169 {
21170 /* Defer until we have an assembler name set. */
21171 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21172 {
21173 limbo_die_node *asm_name;
21174
21175 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21176 asm_name->die = die;
21177 asm_name->created_for = decl;
21178 asm_name->next = deferred_asm_name;
21179 deferred_asm_name = asm_name;
21180 }
21181 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21182 add_linkage_attr (die, decl);
21183 }
21184
21185 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21186
21187 static void
21188 add_linkage_name (dw_die_ref die, tree decl)
21189 {
21190 if (debug_info_level > DINFO_LEVEL_NONE
21191 && VAR_OR_FUNCTION_DECL_P (decl)
21192 && TREE_PUBLIC (decl)
21193 && !(VAR_P (decl) && DECL_REGISTER (decl))
21194 && die->die_tag != DW_TAG_member)
21195 add_linkage_name_raw (die, decl);
21196 }
21197
21198 /* Add a DW_AT_name attribute and source coordinate attribute for the
21199 given decl, but only if it actually has a name. */
21200
21201 static void
21202 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21203 bool no_linkage_name)
21204 {
21205 tree decl_name;
21206
21207 decl_name = DECL_NAME (decl);
21208 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21209 {
21210 const char *name = dwarf2_name (decl, 0);
21211 if (name)
21212 add_name_attribute (die, name);
21213 if (! DECL_ARTIFICIAL (decl))
21214 add_src_coords_attributes (die, decl);
21215
21216 if (!no_linkage_name)
21217 add_linkage_name (die, decl);
21218 }
21219
21220 #ifdef VMS_DEBUGGING_INFO
21221 /* Get the function's name, as described by its RTL. This may be different
21222 from the DECL_NAME name used in the source file. */
21223 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21224 {
21225 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21226 XEXP (DECL_RTL (decl), 0), false);
21227 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21228 }
21229 #endif /* VMS_DEBUGGING_INFO */
21230 }
21231
21232 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21233
21234 static void
21235 add_discr_value (dw_die_ref die, dw_discr_value *value)
21236 {
21237 dw_attr_node attr;
21238
21239 attr.dw_attr = DW_AT_discr_value;
21240 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21241 attr.dw_attr_val.val_entry = NULL;
21242 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21243 if (value->pos)
21244 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21245 else
21246 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21247 add_dwarf_attr (die, &attr);
21248 }
21249
21250 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21251
21252 static void
21253 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21254 {
21255 dw_attr_node attr;
21256
21257 attr.dw_attr = DW_AT_discr_list;
21258 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21259 attr.dw_attr_val.val_entry = NULL;
21260 attr.dw_attr_val.v.val_discr_list = discr_list;
21261 add_dwarf_attr (die, &attr);
21262 }
21263
21264 static inline dw_discr_list_ref
21265 AT_discr_list (dw_attr_node *attr)
21266 {
21267 return attr->dw_attr_val.v.val_discr_list;
21268 }
21269
21270 #ifdef VMS_DEBUGGING_INFO
21271 /* Output the debug main pointer die for VMS. */
21272
21273 void
21274 dwarf2out_vms_debug_main_pointer (void)
21275 {
21276 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21277 dw_die_ref die;
21278
21279 /* Allocate the VMS debug main subprogram die. */
21280 die = new_die_raw (DW_TAG_subprogram);
21281 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21282 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21283 current_function_funcdef_no);
21284 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21285
21286 /* Make it the first child of comp_unit_die (). */
21287 die->die_parent = comp_unit_die ();
21288 if (comp_unit_die ()->die_child)
21289 {
21290 die->die_sib = comp_unit_die ()->die_child->die_sib;
21291 comp_unit_die ()->die_child->die_sib = die;
21292 }
21293 else
21294 {
21295 die->die_sib = die;
21296 comp_unit_die ()->die_child = die;
21297 }
21298 }
21299 #endif /* VMS_DEBUGGING_INFO */
21300
21301 /* Push a new declaration scope. */
21302
21303 static void
21304 push_decl_scope (tree scope)
21305 {
21306 vec_safe_push (decl_scope_table, scope);
21307 }
21308
21309 /* Pop a declaration scope. */
21310
21311 static inline void
21312 pop_decl_scope (void)
21313 {
21314 decl_scope_table->pop ();
21315 }
21316
21317 /* walk_tree helper function for uses_local_type, below. */
21318
21319 static tree
21320 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21321 {
21322 if (!TYPE_P (*tp))
21323 *walk_subtrees = 0;
21324 else
21325 {
21326 tree name = TYPE_NAME (*tp);
21327 if (name && DECL_P (name) && decl_function_context (name))
21328 return *tp;
21329 }
21330 return NULL_TREE;
21331 }
21332
21333 /* If TYPE involves a function-local type (including a local typedef to a
21334 non-local type), returns that type; otherwise returns NULL_TREE. */
21335
21336 static tree
21337 uses_local_type (tree type)
21338 {
21339 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21340 return used;
21341 }
21342
21343 /* Return the DIE for the scope that immediately contains this type.
21344 Non-named types that do not involve a function-local type get global
21345 scope. Named types nested in namespaces or other types get their
21346 containing scope. All other types (i.e. function-local named types) get
21347 the current active scope. */
21348
21349 static dw_die_ref
21350 scope_die_for (tree t, dw_die_ref context_die)
21351 {
21352 dw_die_ref scope_die = NULL;
21353 tree containing_scope;
21354
21355 /* Non-types always go in the current scope. */
21356 gcc_assert (TYPE_P (t));
21357
21358 /* Use the scope of the typedef, rather than the scope of the type
21359 it refers to. */
21360 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21361 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21362 else
21363 containing_scope = TYPE_CONTEXT (t);
21364
21365 /* Use the containing namespace if there is one. */
21366 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21367 {
21368 if (context_die == lookup_decl_die (containing_scope))
21369 /* OK */;
21370 else if (debug_info_level > DINFO_LEVEL_TERSE)
21371 context_die = get_context_die (containing_scope);
21372 else
21373 containing_scope = NULL_TREE;
21374 }
21375
21376 /* Ignore function type "scopes" from the C frontend. They mean that
21377 a tagged type is local to a parmlist of a function declarator, but
21378 that isn't useful to DWARF. */
21379 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21380 containing_scope = NULL_TREE;
21381
21382 if (SCOPE_FILE_SCOPE_P (containing_scope))
21383 {
21384 /* If T uses a local type keep it local as well, to avoid references
21385 to function-local DIEs from outside the function. */
21386 if (current_function_decl && uses_local_type (t))
21387 scope_die = context_die;
21388 else
21389 scope_die = comp_unit_die ();
21390 }
21391 else if (TYPE_P (containing_scope))
21392 {
21393 /* For types, we can just look up the appropriate DIE. */
21394 if (debug_info_level > DINFO_LEVEL_TERSE)
21395 scope_die = get_context_die (containing_scope);
21396 else
21397 {
21398 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21399 if (scope_die == NULL)
21400 scope_die = comp_unit_die ();
21401 }
21402 }
21403 else
21404 scope_die = context_die;
21405
21406 return scope_die;
21407 }
21408
21409 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21410
21411 static inline int
21412 local_scope_p (dw_die_ref context_die)
21413 {
21414 for (; context_die; context_die = context_die->die_parent)
21415 if (context_die->die_tag == DW_TAG_inlined_subroutine
21416 || context_die->die_tag == DW_TAG_subprogram)
21417 return 1;
21418
21419 return 0;
21420 }
21421
21422 /* Returns nonzero if CONTEXT_DIE is a class. */
21423
21424 static inline int
21425 class_scope_p (dw_die_ref context_die)
21426 {
21427 return (context_die
21428 && (context_die->die_tag == DW_TAG_structure_type
21429 || context_die->die_tag == DW_TAG_class_type
21430 || context_die->die_tag == DW_TAG_interface_type
21431 || context_die->die_tag == DW_TAG_union_type));
21432 }
21433
21434 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21435 whether or not to treat a DIE in this context as a declaration. */
21436
21437 static inline int
21438 class_or_namespace_scope_p (dw_die_ref context_die)
21439 {
21440 return (class_scope_p (context_die)
21441 || (context_die && context_die->die_tag == DW_TAG_namespace));
21442 }
21443
21444 /* Many forms of DIEs require a "type description" attribute. This
21445 routine locates the proper "type descriptor" die for the type given
21446 by 'type' plus any additional qualifiers given by 'cv_quals', and
21447 adds a DW_AT_type attribute below the given die. */
21448
21449 static void
21450 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21451 bool reverse, dw_die_ref context_die)
21452 {
21453 enum tree_code code = TREE_CODE (type);
21454 dw_die_ref type_die = NULL;
21455
21456 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21457 or fixed-point type, use the inner type. This is because we have no
21458 support for unnamed types in base_type_die. This can happen if this is
21459 an Ada subrange type. The correct solution is to emit a subrange type DIE. */
21460 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21461 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21462 type = TREE_TYPE (type), code = TREE_CODE (type);
21463
21464 if (code == ERROR_MARK
21465 /* Handle a special case. For functions whose return type is void, we
21466 generate *no* type attribute. (Note that no object may have type
21467 `void', so this only applies to function return types). */
21468 || code == VOID_TYPE)
21469 return;
21470
21471 type_die = modified_type_die (type,
21472 cv_quals | TYPE_QUALS (type),
21473 reverse,
21474 context_die);
21475
21476 if (type_die != NULL)
21477 add_AT_die_ref (object_die, DW_AT_type, type_die);
21478 }
21479
21480 /* Given an object die, add the calling convention attribute for the
21481 function call type. */
21482 static void
21483 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21484 {
21485 enum dwarf_calling_convention value = DW_CC_normal;
21486
21487 value = ((enum dwarf_calling_convention)
21488 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21489
21490 if (is_fortran ()
21491 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21492 {
21493 /* DWARF 2 doesn't provide a way to identify a program's source-level
21494 entry point. DW_AT_calling_convention attributes are only meant
21495 to describe functions' calling conventions. However, lacking a
21496 better way to signal the Fortran main program, we used this for
21497 a long time, following existing custom. Now, DWARF 4 has
21498 DW_AT_main_subprogram, which we add below, but some tools still
21499 rely on the old way, which we thus keep. */
21500 value = DW_CC_program;
21501
21502 if (dwarf_version >= 4 || !dwarf_strict)
21503 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21504 }
21505
21506 /* Only add the attribute if the backend requests it, and the value
21507 is not DW_CC_normal. */
21508 if (value && (value != DW_CC_normal))
21509 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21510 }
21511
21512 /* Given a tree pointer to a struct, class, union, or enum type node, return
21513 a pointer to the (string) tag name for the given type, or zero if the type
21514 was declared without a tag. */
21515
21516 static const char *
21517 type_tag (const_tree type)
21518 {
21519 const char *name = 0;
21520
21521 if (TYPE_NAME (type) != 0)
21522 {
21523 tree t = 0;
21524
21525 /* Find the IDENTIFIER_NODE for the type name. */
21526 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21527 && !TYPE_NAMELESS (type))
21528 t = TYPE_NAME (type);
21529
21530 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21531 a TYPE_DECL node, regardless of whether or not a `typedef' was
21532 involved. */
21533 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21534 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21535 {
21536 /* We want to be extra verbose. Don't call dwarf_name if
21537 DECL_NAME isn't set. The default hook for decl_printable_name
21538 doesn't like that, and in this context it's correct to return
21539 0, instead of "<anonymous>" or the like. */
21540 if (DECL_NAME (TYPE_NAME (type))
21541 && !DECL_NAMELESS (TYPE_NAME (type)))
21542 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21543 }
21544
21545 /* Now get the name as a string, or invent one. */
21546 if (!name && t != 0)
21547 name = IDENTIFIER_POINTER (t);
21548 }
21549
21550 return (name == 0 || *name == '\0') ? 0 : name;
21551 }
21552
21553 /* Return the type associated with a data member, making a special check
21554 for bit-field types. */
21555
21556 static inline tree
21557 member_declared_type (const_tree member)
21558 {
21559 return (DECL_BIT_FIELD_TYPE (member)
21560 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21561 }
21562
21563 /* Get the decl's label, as described by its RTL. This may be different
21564 from the DECL_NAME name used in the source file. */
21565
21566 #if 0
21567 static const char *
21568 decl_start_label (tree decl)
21569 {
21570 rtx x;
21571 const char *fnname;
21572
21573 x = DECL_RTL (decl);
21574 gcc_assert (MEM_P (x));
21575
21576 x = XEXP (x, 0);
21577 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21578
21579 fnname = XSTR (x, 0);
21580 return fnname;
21581 }
21582 #endif
21583 \f
21584 /* For variable-length arrays that have been previously generated, but
21585 may be incomplete due to missing subscript info, fill the subscript
21586 info. Return TRUE if this is one of those cases. */
21587 static bool
21588 fill_variable_array_bounds (tree type)
21589 {
21590 if (TREE_ASM_WRITTEN (type)
21591 && TREE_CODE (type) == ARRAY_TYPE
21592 && variably_modified_type_p (type, NULL))
21593 {
21594 dw_die_ref array_die = lookup_type_die (type);
21595 if (!array_die)
21596 return false;
21597 add_subscript_info (array_die, type, !is_ada ());
21598 return true;
21599 }
21600 return false;
21601 }
21602
21603 /* These routines generate the internal representation of the DIEs for
21604 the compilation unit. Debugging information is collected by walking
21605 the declaration trees passed in from dwarf2out_decl(). */
21606
21607 static void
21608 gen_array_type_die (tree type, dw_die_ref context_die)
21609 {
21610 dw_die_ref array_die;
21611
21612 /* GNU compilers represent multidimensional array types as sequences of one
21613 dimensional array types whose element types are themselves array types.
21614 We sometimes squish that down to a single array_type DIE with multiple
21615 subscripts in the Dwarf debugging info. The draft Dwarf specification
21616 says that we are allowed to do this kind of compression in C, because
21617 there is no difference between an array of arrays and a multidimensional
21618 array. We don't do this for Ada to remain as close as possible to the
21619 actual representation, which is especially important given the language's
21620 flexibility with respect to arrays of variable size. */
21621
21622 bool collapse_nested_arrays = !is_ada ();
21623
21624 if (fill_variable_array_bounds (type))
21625 return;
21626
21627 dw_die_ref scope_die = scope_die_for (type, context_die);
21628 tree element_type;
21629
21630 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21631 DW_TAG_string_type doesn't have DW_AT_type attribute). */
21632 if (TYPE_STRING_FLAG (type)
21633 && TREE_CODE (type) == ARRAY_TYPE
21634 && is_fortran ()
21635 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21636 {
21637 HOST_WIDE_INT size;
21638
21639 array_die = new_die (DW_TAG_string_type, scope_die, type);
21640 add_name_attribute (array_die, type_tag (type));
21641 equate_type_number_to_die (type, array_die);
21642 size = int_size_in_bytes (type);
21643 if (size >= 0)
21644 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21645 /* ??? We can't annotate types late, but for LTO we may not
21646 generate a location early either (gfortran.dg/save_6.f90). */
21647 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21648 && TYPE_DOMAIN (type) != NULL_TREE
21649 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21650 {
21651 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21652 tree rszdecl = szdecl;
21653
21654 size = int_size_in_bytes (TREE_TYPE (szdecl));
21655 if (!DECL_P (szdecl))
21656 {
21657 if (TREE_CODE (szdecl) == INDIRECT_REF
21658 && DECL_P (TREE_OPERAND (szdecl, 0)))
21659 {
21660 rszdecl = TREE_OPERAND (szdecl, 0);
21661 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21662 != DWARF2_ADDR_SIZE)
21663 size = 0;
21664 }
21665 else
21666 size = 0;
21667 }
21668 if (size > 0)
21669 {
21670 dw_loc_list_ref loc
21671 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21672 NULL);
21673 if (loc)
21674 {
21675 add_AT_location_description (array_die, DW_AT_string_length,
21676 loc);
21677 if (size != DWARF2_ADDR_SIZE)
21678 add_AT_unsigned (array_die, dwarf_version >= 5
21679 ? DW_AT_string_length_byte_size
21680 : DW_AT_byte_size, size);
21681 }
21682 }
21683 }
21684 return;
21685 }
21686
21687 array_die = new_die (DW_TAG_array_type, scope_die, type);
21688 add_name_attribute (array_die, type_tag (type));
21689 equate_type_number_to_die (type, array_die);
21690
21691 if (TREE_CODE (type) == VECTOR_TYPE)
21692 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21693
21694 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21695 if (is_fortran ()
21696 && TREE_CODE (type) == ARRAY_TYPE
21697 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21698 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21699 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21700
21701 #if 0
21702 /* We default the array ordering. Debuggers will probably do the right
21703 things even if DW_AT_ordering is not present. It's not even an issue
21704 until we start to get into multidimensional arrays anyway. If a debugger
21705 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21706 then we'll have to put the DW_AT_ordering attribute back in. (But if
21707 and when we find out that we need to put these in, we will only do so
21708 for multidimensional arrays.) */
21709 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21710 #endif
21711
21712 if (TREE_CODE (type) == VECTOR_TYPE)
21713 {
21714 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21715 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21716 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21717 add_bound_info (subrange_die, DW_AT_upper_bound,
21718 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21719 }
21720 else
21721 add_subscript_info (array_die, type, collapse_nested_arrays);
21722
21723 /* Add representation of the type of the elements of this array type and
21724 emit the corresponding DIE if we haven't done it already. */
21725 element_type = TREE_TYPE (type);
21726 if (collapse_nested_arrays)
21727 while (TREE_CODE (element_type) == ARRAY_TYPE)
21728 {
21729 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21730 break;
21731 element_type = TREE_TYPE (element_type);
21732 }
21733
21734 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21735 TREE_CODE (type) == ARRAY_TYPE
21736 && TYPE_REVERSE_STORAGE_ORDER (type),
21737 context_die);
21738
21739 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21740 if (TYPE_ARTIFICIAL (type))
21741 add_AT_flag (array_die, DW_AT_artificial, 1);
21742
21743 if (get_AT (array_die, DW_AT_name))
21744 add_pubtype (type, array_die);
21745
21746 add_alignment_attribute (array_die, type);
21747 }
21748
21749 /* This routine generates a DIE for an array with a hidden descriptor;
21750 the details are filled into *info by a langhook. */
21751
21752 static void
21753 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21754 dw_die_ref context_die)
21755 {
21756 const dw_die_ref scope_die = scope_die_for (type, context_die);
21757 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21758 struct loc_descr_context context = { type, info->base_decl, NULL,
21759 false, false };
21760 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21761 int dim;
21762
21763 add_name_attribute (array_die, type_tag (type));
21764 equate_type_number_to_die (type, array_die);
21765
21766 if (info->ndimensions > 1)
21767 switch (info->ordering)
21768 {
21769 case array_descr_ordering_row_major:
21770 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21771 break;
21772 case array_descr_ordering_column_major:
21773 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21774 break;
21775 default:
21776 break;
21777 }
21778
21779 if (dwarf_version >= 3 || !dwarf_strict)
21780 {
21781 if (info->data_location)
21782 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21783 dw_scalar_form_exprloc, &context);
21784 if (info->associated)
21785 add_scalar_info (array_die, DW_AT_associated, info->associated,
21786 dw_scalar_form_constant
21787 | dw_scalar_form_exprloc
21788 | dw_scalar_form_reference, &context);
21789 if (info->allocated)
21790 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21791 dw_scalar_form_constant
21792 | dw_scalar_form_exprloc
21793 | dw_scalar_form_reference, &context);
21794 if (info->stride)
21795 {
21796 const enum dwarf_attribute attr
21797 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21798 const int forms
21799 = (info->stride_in_bits)
21800 ? dw_scalar_form_constant
21801 : (dw_scalar_form_constant
21802 | dw_scalar_form_exprloc
21803 | dw_scalar_form_reference);
21804
21805 add_scalar_info (array_die, attr, info->stride, forms, &context);
21806 }
21807 }
21808 if (dwarf_version >= 5)
21809 {
21810 if (info->rank)
21811 {
21812 add_scalar_info (array_die, DW_AT_rank, info->rank,
21813 dw_scalar_form_constant
21814 | dw_scalar_form_exprloc, &context);
21815 subrange_tag = DW_TAG_generic_subrange;
21816 context.placeholder_arg = true;
21817 }
21818 }
21819
21820 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21821
21822 for (dim = 0; dim < info->ndimensions; dim++)
21823 {
21824 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21825
21826 if (info->dimen[dim].bounds_type)
21827 add_type_attribute (subrange_die,
21828 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21829 false, context_die);
21830 if (info->dimen[dim].lower_bound)
21831 add_bound_info (subrange_die, DW_AT_lower_bound,
21832 info->dimen[dim].lower_bound, &context);
21833 if (info->dimen[dim].upper_bound)
21834 add_bound_info (subrange_die, DW_AT_upper_bound,
21835 info->dimen[dim].upper_bound, &context);
21836 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21837 add_scalar_info (subrange_die, DW_AT_byte_stride,
21838 info->dimen[dim].stride,
21839 dw_scalar_form_constant
21840 | dw_scalar_form_exprloc
21841 | dw_scalar_form_reference,
21842 &context);
21843 }
21844
21845 gen_type_die (info->element_type, context_die);
21846 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21847 TREE_CODE (type) == ARRAY_TYPE
21848 && TYPE_REVERSE_STORAGE_ORDER (type),
21849 context_die);
21850
21851 if (get_AT (array_die, DW_AT_name))
21852 add_pubtype (type, array_die);
21853
21854 add_alignment_attribute (array_die, type);
21855 }
21856
21857 #if 0
21858 static void
21859 gen_entry_point_die (tree decl, dw_die_ref context_die)
21860 {
21861 tree origin = decl_ultimate_origin (decl);
21862 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21863
21864 if (origin != NULL)
21865 add_abstract_origin_attribute (decl_die, origin);
21866 else
21867 {
21868 add_name_and_src_coords_attributes (decl_die, decl);
21869 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21870 TYPE_UNQUALIFIED, false, context_die);
21871 }
21872
21873 if (DECL_ABSTRACT_P (decl))
21874 equate_decl_number_to_die (decl, decl_die);
21875 else
21876 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21877 }
21878 #endif
21879
21880 /* Walk through the list of incomplete types again, trying once more to
21881 emit full debugging info for them. */
21882
21883 static void
21884 retry_incomplete_types (void)
21885 {
21886 set_early_dwarf s;
21887 int i;
21888
21889 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
21890 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
21891 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
21892 vec_safe_truncate (incomplete_types, 0);
21893 }
21894
21895 /* Determine what tag to use for a record type. */
21896
21897 static enum dwarf_tag
21898 record_type_tag (tree type)
21899 {
21900 if (! lang_hooks.types.classify_record)
21901 return DW_TAG_structure_type;
21902
21903 switch (lang_hooks.types.classify_record (type))
21904 {
21905 case RECORD_IS_STRUCT:
21906 return DW_TAG_structure_type;
21907
21908 case RECORD_IS_CLASS:
21909 return DW_TAG_class_type;
21910
21911 case RECORD_IS_INTERFACE:
21912 if (dwarf_version >= 3 || !dwarf_strict)
21913 return DW_TAG_interface_type;
21914 return DW_TAG_structure_type;
21915
21916 default:
21917 gcc_unreachable ();
21918 }
21919 }
21920
21921 /* Generate a DIE to represent an enumeration type. Note that these DIEs
21922 include all of the information about the enumeration values also. Each
21923 enumerated type name/value is listed as a child of the enumerated type
21924 DIE. */
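/* Illustrative sketch (exact attributes such as the underlying type,
   encoding and enum_class depend on the language, DWARF version and
   strict-DWARF mode): for

       enum e { RED = 1, BLUE = 2 };

   the intent is roughly:

       DW_TAG_enumeration_type  DW_AT_name "e"  DW_AT_byte_size ...
         DW_TAG_enumerator  DW_AT_name "RED"   DW_AT_const_value 1
         DW_TAG_enumerator  DW_AT_name "BLUE"  DW_AT_const_value 2  */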
21925
21926 static dw_die_ref
21927 gen_enumeration_type_die (tree type, dw_die_ref context_die)
21928 {
21929 dw_die_ref type_die = lookup_type_die (type);
21930 dw_die_ref orig_type_die = type_die;
21931
21932 if (type_die == NULL)
21933 {
21934 type_die = new_die (DW_TAG_enumeration_type,
21935 scope_die_for (type, context_die), type);
21936 equate_type_number_to_die (type, type_die);
21937 add_name_attribute (type_die, type_tag (type));
21938 if ((dwarf_version >= 4 || !dwarf_strict)
21939 && ENUM_IS_SCOPED (type))
21940 add_AT_flag (type_die, DW_AT_enum_class, 1);
21941 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
21942 add_AT_flag (type_die, DW_AT_declaration, 1);
21943 if (!dwarf_strict)
21944 add_AT_unsigned (type_die, DW_AT_encoding,
21945 TYPE_UNSIGNED (type)
21946 ? DW_ATE_unsigned
21947 : DW_ATE_signed);
21948 }
21949 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
21950 return type_die;
21951 else
21952 remove_AT (type_die, DW_AT_declaration);
21953
21954 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
21955 given enum type is incomplete, do not generate the DW_AT_byte_size
21956 attribute or the DW_AT_element_list attribute. */
21957 if (TYPE_SIZE (type))
21958 {
21959 tree link;
21960
21961 if (!ENUM_IS_OPAQUE (type))
21962 TREE_ASM_WRITTEN (type) = 1;
21963 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
21964 add_byte_size_attribute (type_die, type);
21965 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
21966 add_alignment_attribute (type_die, type);
21967 if ((dwarf_version >= 3 || !dwarf_strict)
21968 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
21969 {
21970 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
21971 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
21972 context_die);
21973 }
21974 if (TYPE_STUB_DECL (type) != NULL_TREE)
21975 {
21976 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
21977 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
21978 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
21979 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
21980 }
21981
21982 /* If the first reference to this type was as the return type of an
21983 inline function, then it may not have a parent. Fix this now. */
21984 if (type_die->die_parent == NULL)
21985 add_child_die (scope_die_for (type, context_die), type_die);
21986
21987 for (link = TYPE_VALUES (type);
21988 link != NULL; link = TREE_CHAIN (link))
21989 {
21990 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
21991 tree value = TREE_VALUE (link);
21992
21993 gcc_assert (!ENUM_IS_OPAQUE (type));
21994 add_name_attribute (enum_die,
21995 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
21996
21997 if (TREE_CODE (value) == CONST_DECL)
21998 value = DECL_INITIAL (value);
21999
22000 if (simple_type_size_in_bits (TREE_TYPE (value))
22001 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22002 {
22003 /* For constant forms created by add_AT_unsigned DWARF
22004 consumers (GDB, elfutils, etc.) always zero extend
22005 the value. Only when the actual value is negative
22006 do we need to use add_AT_int to generate a constant
22007 form that can represent negative values. */
22008 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22009 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22010 add_AT_unsigned (enum_die, DW_AT_const_value,
22011 (unsigned HOST_WIDE_INT) val);
22012 else
22013 add_AT_int (enum_die, DW_AT_const_value, val);
22014 }
22015 else
22016 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22017 that here. TODO: This should be re-worked to use correct
22018 signed/unsigned double tags for all cases. */
22019 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22020 }
22021
22022 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22023 if (TYPE_ARTIFICIAL (type)
22024 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22025 add_AT_flag (type_die, DW_AT_artificial, 1);
22026 }
22027 else
22028 add_AT_flag (type_die, DW_AT_declaration, 1);
22029
22030 add_pubtype (type, type_die);
22031
22032 return type_die;
22033 }
22034
22035 /* Generate a DIE to represent either a real live formal parameter decl or to
22036 represent just the type of some formal parameter position in some function
22037 type.
22038
22039 Note that this routine is a bit unusual because its argument may be a
22040 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22041 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22042 node. If it's the former then this function is being called to output a
22043 DIE to represent a formal parameter object (or some inlining thereof). If
22044 it's the latter, then this function is only being called to output a
22045 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22046 argument type of some subprogram type.
22047 If EMIT_NAME_P is true, name and source coordinate attributes
22048 are emitted. */
22049
22050 static dw_die_ref
22051 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22052 dw_die_ref context_die)
22053 {
22054 tree node_or_origin = node ? node : origin;
22055 tree ultimate_origin;
22056 dw_die_ref parm_die = NULL;
22057
22058 if (DECL_P (node_or_origin))
22059 {
22060 parm_die = lookup_decl_die (node);
22061
22062 /* If the contexts differ, we may not be talking about the same
22063 thing.
22064 ??? When in LTO the DIE parent is the "abstract" copy and the
22065 context_die is the specification "copy". But this whole block
22066 should eventually no longer be needed. */
22067 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
22068 {
22069 if (!DECL_ABSTRACT_P (node))
22070 {
22071 /* This can happen when creating an inlined instance, in
22072 which case we need to create a new DIE that will get
22073 annotated with DW_AT_abstract_origin. */
22074 parm_die = NULL;
22075 }
22076 else
22077 gcc_unreachable ();
22078 }
22079
22080 if (parm_die && parm_die->die_parent == NULL)
22081 {
22082 /* Check that parm_die already has the right attributes that
22083 we would have added below. If any attributes are
22084 missing, fall through to add them. */
22085 if (! DECL_ABSTRACT_P (node_or_origin)
22086 && !get_AT (parm_die, DW_AT_location)
22087 && !get_AT (parm_die, DW_AT_const_value))
22088 /* We are missing location info, and are about to add it. */
22089 ;
22090 else
22091 {
22092 add_child_die (context_die, parm_die);
22093 return parm_die;
22094 }
22095 }
22096 }
22097
22098 /* If we have a previously generated DIE, use it, unless this is a
22099 concrete instance (origin != NULL), in which case we need a new
22100 DIE with a corresponding DW_AT_abstract_origin. */
22101 bool reusing_die;
22102 if (parm_die && origin == NULL)
22103 reusing_die = true;
22104 else
22105 {
22106 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22107 reusing_die = false;
22108 }
22109
22110 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22111 {
22112 case tcc_declaration:
22113 ultimate_origin = decl_ultimate_origin (node_or_origin);
22114 if (node || ultimate_origin)
22115 origin = ultimate_origin;
22116
22117 if (reusing_die)
22118 goto add_location;
22119
22120 if (origin != NULL)
22121 add_abstract_origin_attribute (parm_die, origin);
22122 else if (emit_name_p)
22123 add_name_and_src_coords_attributes (parm_die, node);
22124 if (origin == NULL
22125 || (! DECL_ABSTRACT_P (node_or_origin)
22126 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22127 decl_function_context
22128 (node_or_origin))))
22129 {
22130 tree type = TREE_TYPE (node_or_origin);
22131 if (decl_by_reference_p (node_or_origin))
22132 add_type_attribute (parm_die, TREE_TYPE (type),
22133 TYPE_UNQUALIFIED,
22134 false, context_die);
22135 else
22136 add_type_attribute (parm_die, type,
22137 decl_quals (node_or_origin),
22138 false, context_die);
22139 }
22140 if (origin == NULL && DECL_ARTIFICIAL (node))
22141 add_AT_flag (parm_die, DW_AT_artificial, 1);
22142 add_location:
22143 if (node && node != origin)
22144 equate_decl_number_to_die (node, parm_die);
22145 if (! DECL_ABSTRACT_P (node_or_origin))
22146 add_location_or_const_value_attribute (parm_die, node_or_origin,
22147 node == NULL);
22148
22149 break;
22150
22151 case tcc_type:
22152 /* We were called with some kind of a ..._TYPE node. */
22153 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22154 context_die);
22155 break;
22156
22157 default:
22158 gcc_unreachable ();
22159 }
22160
22161 return parm_die;
22162 }
22163
22164 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22165 children DW_TAG_formal_parameter DIEs representing the arguments of the
22166 parameter pack.
22167
22168 PARM_PACK must be a function parameter pack.
22169 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22170 must point to the subsequent arguments of the function PACK_ARG belongs to.
22171 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22172 If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
22173 following the last one for which a DIE was generated. */
22174
22175 static dw_die_ref
22176 gen_formal_parameter_pack_die (tree parm_pack,
22177 tree pack_arg,
22178 dw_die_ref subr_die,
22179 tree *next_arg)
22180 {
22181 tree arg;
22182 dw_die_ref parm_pack_die;
22183
22184 gcc_assert (parm_pack
22185 && lang_hooks.function_parameter_pack_p (parm_pack)
22186 && subr_die);
22187
22188 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22189 add_src_coords_attributes (parm_pack_die, parm_pack);
22190
22191 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22192 {
22193 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22194 parm_pack))
22195 break;
22196 gen_formal_parameter_die (arg, NULL,
22197 false /* Don't emit name attribute. */,
22198 parm_pack_die);
22199 }
22200 if (next_arg)
22201 *next_arg = arg;
22202 return parm_pack_die;
22203 }
22204
22205 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22206 at the end of an (ANSI prototyped) formal parameters list. */
22207
22208 static void
22209 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22210 {
22211 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22212 }
22213
22214 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22215 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22216 parameters as specified in some function type specification (except for
22217 those which appear as part of a function *definition*). */
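/* Illustrative sketch: a function type such as

       int (*fp) (int, ...);

   would get one nameless DW_TAG_formal_parameter child for the int
   argument, followed by a DW_TAG_unspecified_parameters child for the
   trailing ellipsis.  For a METHOD_TYPE the first parameter is marked
   DW_AT_artificial (the implicit `this' pointer).  */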
22218
22219 static void
22220 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22221 {
22222 tree link;
22223 tree formal_type = NULL;
22224 tree first_parm_type;
22225 tree arg;
22226
22227 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22228 {
22229 arg = DECL_ARGUMENTS (function_or_method_type);
22230 function_or_method_type = TREE_TYPE (function_or_method_type);
22231 }
22232 else
22233 arg = NULL_TREE;
22234
22235 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22236
22237 /* Make our first pass over the list of formal parameter types and output a
22238 DW_TAG_formal_parameter DIE for each one. */
22239 for (link = first_parm_type; link; )
22240 {
22241 dw_die_ref parm_die;
22242
22243 formal_type = TREE_VALUE (link);
22244 if (formal_type == void_type_node)
22245 break;
22246
22247 /* Output a (nameless) DIE to represent the formal parameter itself. */
22248 if (!POINTER_BOUNDS_TYPE_P (formal_type))
22249 {
22250 parm_die = gen_formal_parameter_die (formal_type, NULL,
22251 true /* Emit name attribute. */,
22252 context_die);
22253 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22254 && link == first_parm_type)
22255 {
22256 add_AT_flag (parm_die, DW_AT_artificial, 1);
22257 if (dwarf_version >= 3 || !dwarf_strict)
22258 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22259 }
22260 else if (arg && DECL_ARTIFICIAL (arg))
22261 add_AT_flag (parm_die, DW_AT_artificial, 1);
22262 }
22263
22264 link = TREE_CHAIN (link);
22265 if (arg)
22266 arg = DECL_CHAIN (arg);
22267 }
22268
22269 /* If this function type has an ellipsis, add a
22270 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22271 if (formal_type != void_type_node)
22272 gen_unspecified_parameters_die (function_or_method_type, context_die);
22273
22274 /* Make our second (and final) pass over the list of formal parameter types
22275 and output DIEs to represent those types (as necessary). */
22276 for (link = TYPE_ARG_TYPES (function_or_method_type);
22277 link && TREE_VALUE (link);
22278 link = TREE_CHAIN (link))
22279 gen_type_die (TREE_VALUE (link), context_die);
22280 }
22281
22282 /* We want to generate the DIE for TYPE so that we can generate the
22283 die for MEMBER, which has been defined; we will need to refer back
22284 to the member declaration nested within TYPE. If we're trying to
22285 generate minimal debug info for TYPE, processing TYPE won't do the
22286 trick; we need to attach the member declaration by hand. */
22287
22288 static void
22289 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22290 {
22291 gen_type_die (type, context_die);
22292
22293 /* If we're trying to avoid duplicate debug info, we may not have
22294 emitted the member decl for this function. Emit it now. */
22295 if (TYPE_STUB_DECL (type)
22296 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22297 && ! lookup_decl_die (member))
22298 {
22299 dw_die_ref type_die;
22300 gcc_assert (!decl_ultimate_origin (member));
22301
22302 push_decl_scope (type);
22303 type_die = lookup_type_die_strip_naming_typedef (type);
22304 if (TREE_CODE (member) == FUNCTION_DECL)
22305 gen_subprogram_die (member, type_die);
22306 else if (TREE_CODE (member) == FIELD_DECL)
22307 {
22308 /* Ignore the nameless fields that are used to skip bits but handle
22309 C++ anonymous unions and structs. */
22310 if (DECL_NAME (member) != NULL_TREE
22311 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22312 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22313 {
22314 struct vlr_context vlr_ctx = {
22315 DECL_CONTEXT (member), /* struct_type */
22316 NULL_TREE /* variant_part_offset */
22317 };
22318 gen_type_die (member_declared_type (member), type_die);
22319 gen_field_die (member, &vlr_ctx, type_die);
22320 }
22321 }
22322 else
22323 gen_variable_die (member, NULL_TREE, type_die);
22324
22325 pop_decl_scope ();
22326 }
22327 }
22328 \f
22329 /* Forward declare these functions, because they are mutually recursive
22330 with their set_block_* pairing functions. */
22331 static void set_decl_origin_self (tree);
22332
22333 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22334 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22335 that it points to the node itself, thus indicating that the node is its
22336 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22337 the given node is NULL, recursively descend the decl/block tree of which
22338 it is the root, and for each other ..._DECL or BLOCK node contained
22339 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22340 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22341 values to point to themselves. */
22342
22343 static void
22344 set_block_origin_self (tree stmt)
22345 {
22346 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22347 {
22348 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22349
22350 {
22351 tree local_decl;
22352
22353 for (local_decl = BLOCK_VARS (stmt);
22354 local_decl != NULL_TREE;
22355 local_decl = DECL_CHAIN (local_decl))
22356 /* Do not recurse on nested functions since the inlining status
22357 of parent and child can be different as per the DWARF spec. */
22358 if (TREE_CODE (local_decl) != FUNCTION_DECL
22359 && !DECL_EXTERNAL (local_decl))
22360 set_decl_origin_self (local_decl);
22361 }
22362
22363 {
22364 tree subblock;
22365
22366 for (subblock = BLOCK_SUBBLOCKS (stmt);
22367 subblock != NULL_TREE;
22368 subblock = BLOCK_CHAIN (subblock))
22369 set_block_origin_self (subblock); /* Recurse. */
22370 }
22371 }
22372 }
22373
22374 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22375 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22376 node so that it points to the node itself, thus indicating that the
22377 node represents its own (abstract) origin. Additionally, if the
22378 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22379 the decl/block tree of which the given node is the root, and for
22380 each other ..._DECL or BLOCK node contained therein whose
22381 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22382 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22383 point to themselves. */
22384
22385 static void
22386 set_decl_origin_self (tree decl)
22387 {
22388 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22389 {
22390 DECL_ABSTRACT_ORIGIN (decl) = decl;
22391 if (TREE_CODE (decl) == FUNCTION_DECL)
22392 {
22393 tree arg;
22394
22395 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22396 DECL_ABSTRACT_ORIGIN (arg) = arg;
22397 if (DECL_INITIAL (decl) != NULL_TREE
22398 && DECL_INITIAL (decl) != error_mark_node)
22399 set_block_origin_self (DECL_INITIAL (decl));
22400 }
22401 }
22402 }
22403 \f
22404 /* Mark the early DIE for DECL as the abstract instance. */
22405
22406 static void
22407 dwarf2out_abstract_function (tree decl)
22408 {
22409 dw_die_ref old_die;
22410
22411 /* Make sure we have the actual abstract inline, not a clone. */
22412 decl = DECL_ORIGIN (decl);
22413
22414 if (DECL_IGNORED_P (decl))
22415 return;
22416
22417 old_die = lookup_decl_die (decl);
22418 /* With early debug we always have an old DIE unless we are in LTO
22419 and the user did not compile with debug info but only linked with it. */
22420 if (in_lto_p && ! old_die)
22421 return;
22422 gcc_assert (old_die != NULL);
22423 if (get_AT (old_die, DW_AT_inline)
22424 || get_AT (old_die, DW_AT_abstract_origin))
22425 /* We've already generated the abstract instance. */
22426 return;
22427
22428 /* Go ahead and put DW_AT_inline on the DIE. */
22429 if (DECL_DECLARED_INLINE_P (decl))
22430 {
22431 if (cgraph_function_possibly_inlined_p (decl))
22432 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22433 else
22434 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22435 }
22436 else
22437 {
22438 if (cgraph_function_possibly_inlined_p (decl))
22439 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22440 else
22441 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22442 }
22443
22444 if (DECL_DECLARED_INLINE_P (decl)
22445 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22446 add_AT_flag (old_die, DW_AT_artificial, 1);
22447
22448 set_decl_origin_self (decl);
22449 }
22450
22451 /* Helper function of premark_used_types() which gets called through
22452    hash_table::traverse.
22453 
22454    Marks the DIE of the given TYPE as perennial, so it never gets
22455 marked as unused by prune_unused_types. */
22456
22457 bool
22458 premark_used_types_helper (tree const &type, void *)
22459 {
22460 dw_die_ref die;
22461
22462 die = lookup_type_die (type);
22463 if (die != NULL)
22464 die->die_perennial_p = 1;
22465 return true;
22466 }
22467
22468 /* Helper function of premark_types_used_by_global_vars which gets called
22469    through hash_table::traverse.
22470
22471 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22472 marked as unused by prune_unused_types. The DIE of the type is marked
22473 only if the global variable using the type will actually be emitted. */
22474
22475 int
22476 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22477 void *)
22478 {
22479 struct types_used_by_vars_entry *entry;
22480 dw_die_ref die;
22481
22482 entry = (struct types_used_by_vars_entry *) *slot;
22483 gcc_assert (entry->type != NULL
22484 && entry->var_decl != NULL);
22485 die = lookup_type_die (entry->type);
22486 if (die)
22487 {
22488 /* Ask cgraph if the global variable really is to be emitted.
22489 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22490 varpool_node *node = varpool_node::get (entry->var_decl);
22491 if (node && node->definition)
22492 {
22493 die->die_perennial_p = 1;
22494 /* Keep the parent DIEs as well. */
22495 while ((die = die->die_parent) && die->die_perennial_p == 0)
22496 die->die_perennial_p = 1;
22497 }
22498 }
22499 return 1;
22500 }
22501
22502 /* Mark all members of used_types_hash as perennial. */
22503
22504 static void
22505 premark_used_types (struct function *fun)
22506 {
22507 if (fun && fun->used_types_hash)
22508 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22509 }
22510
22511 /* Mark all members of types_used_by_vars_entry as perennial. */
22512
22513 static void
22514 premark_types_used_by_global_vars (void)
22515 {
22516 if (types_used_by_vars_hash)
22517 types_used_by_vars_hash
22518 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22519 }
22520
22521 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22522 for CA_LOC call arg loc node. */
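/* As an illustrative sketch (not literal compiler output), a direct call

     foo (x);

   typically yields a DIE along the lines of

     DW_TAG_call_site
       DW_AT_call_return_pc   <label just past the call insn>
       DW_AT_call_origin      <reference to foo's DIE, when known>

   where dwarf_TAG/dwarf_AT below choose the DWARF 5 or GNU spelling of
   the tag and attributes as appropriate.  */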
22523
22524 static dw_die_ref
22525 gen_call_site_die (tree decl, dw_die_ref subr_die,
22526 struct call_arg_loc_node *ca_loc)
22527 {
22528 dw_die_ref stmt_die = NULL, die;
22529 tree block = ca_loc->block;
22530
22531 while (block
22532 && block != DECL_INITIAL (decl)
22533 && TREE_CODE (block) == BLOCK)
22534 {
22535 stmt_die = BLOCK_DIE (block);
22536 if (stmt_die)
22537 break;
22538 block = BLOCK_SUPERCONTEXT (block);
22539 }
22540 if (stmt_die == NULL)
22541 stmt_die = subr_die;
22542 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22543 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22544 if (ca_loc->tail_call_p)
22545 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22546 if (ca_loc->symbol_ref)
22547 {
22548 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22549 if (tdie)
22550 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22551 else
22552 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22553 false);
22554 }
22555 return die;
22556 }
22557
22558 /* Generate a DIE to represent a declared function (either file-scope or
22559 block-local). */
22560
22561 static void
22562 gen_subprogram_die (tree decl, dw_die_ref context_die)
22563 {
22564 tree origin = decl_ultimate_origin (decl);
22565 dw_die_ref subr_die;
22566 dw_die_ref old_die = lookup_decl_die (decl);
22567
22568 /* This function gets called multiple times for different stages of
22569 the debug process. For example, for func() in this code:
22570
22571 namespace S
22572 {
22573 void func() { ... }
22574 }
22575
22576 ...we get called 4 times. Twice in early debug and twice in
22577 late debug:
22578
22579 Early debug
22580 -----------
22581
22582 1. Once while generating func() within the namespace. This is
22583 the declaration. The declaration bit below is set, as the
22584 context is the namespace.
22585
22586 A new DIE will be generated with DW_AT_declaration set.
22587
22588 2. Once for func() itself. This is the specification. The
22589 declaration bit below is clear as the context is the CU.
22590
22591 We will use the cached DIE from (1) to create a new DIE with
22592 DW_AT_specification pointing to the declaration in (1).
22593
22594 Late debug via rest_of_handle_final()
22595 -------------------------------------
22596
22597 3. Once generating func() within the namespace. This is also the
22598 declaration, as in (1), but this time we will early exit below
22599 as we have a cached DIE and a declaration needs no additional
22600 annotations (no locations), as the source declaration line
22601 info is enough.
22602
22603 4. Once for func() itself. As in (2), this is the specification,
22604 but this time we will re-use the cached DIE, and just annotate
22605 it with the location information that should now be available.
22606
22607 For something without namespaces, but with abstract instances, we
22608      are also called multiple times:
22609
22610 class Base
22611 {
22612 public:
22613 Base (); // constructor declaration (1)
22614 };
22615
22616 Base::Base () { } // constructor specification (2)
22617
22618 Early debug
22619 -----------
22620
22621 1. Once for the Base() constructor by virtue of it being a
22622 member of the Base class. This is done via
22623 rest_of_type_compilation.
22624
22625 This is a declaration, so a new DIE will be created with
22626 DW_AT_declaration.
22627
22628 2. Once for the Base() constructor definition, but this time
22629 while generating the abstract instance of the base
22630 constructor (__base_ctor) which is being generated via early
22631 debug of reachable functions.
22632
22633 Even though we have a cached version of the declaration (1),
22634 we will create a DW_AT_specification of the declaration DIE
22635 in (1).
22636
22637 3. Once for the __base_ctor itself, but this time, we generate
22638         a DW_AT_abstract_origin version of the DW_AT_specification in
22639 (2).
22640
22641 Late debug via rest_of_handle_final
22642 -----------------------------------
22643
22644 4. One final time for the __base_ctor (which will have a cached
22645         DIE with DW_AT_abstract_origin created in (3)).  This time,
22646         we will just annotate it with the location information now
22647 available.
22648 */
22649 int declaration = (current_function_decl != decl
22650 || class_or_namespace_scope_p (context_die));
22651
22652 /* A declaration that has been previously dumped needs no
22653 additional information. */
22654 if (old_die && declaration)
22655 return;
22656
22657 /* Now that the C++ front end lazily declares artificial member fns, we
22658 might need to retrofit the declaration into its class. */
22659 if (!declaration && !origin && !old_die
22660 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22661 && !class_or_namespace_scope_p (context_die)
22662 && debug_info_level > DINFO_LEVEL_TERSE)
22663 old_die = force_decl_die (decl);
22664
22665 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22666 if (origin != NULL)
22667 {
22668 gcc_assert (!declaration || local_scope_p (context_die));
22669
22670 /* Fixup die_parent for the abstract instance of a nested
22671 inline function. */
22672 if (old_die && old_die->die_parent == NULL)
22673 add_child_die (context_die, old_die);
22674
22675 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22676 {
22677 /* If we have a DW_AT_abstract_origin we have a working
22678 cached version. */
22679 subr_die = old_die;
22680 }
22681 else
22682 {
22683 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22684 add_abstract_origin_attribute (subr_die, origin);
22685 /* This is where the actual code for a cloned function is.
22686 Let's emit linkage name attribute for it. This helps
22687            debuggers to, e.g., set breakpoints into
22688 constructors/destructors when the user asks "break
22689 K::K". */
22690 add_linkage_name (subr_die, decl);
22691 }
22692 }
22693 /* A cached copy, possibly from early dwarf generation. Reuse as
22694 much as possible. */
22695 else if (old_die)
22696 {
22697 if (!get_AT_flag (old_die, DW_AT_declaration)
22698 /* We can have a normal definition following an inline one in the
22699 case of redefinition of GNU C extern inlines.
22700 It seems reasonable to use AT_specification in this case. */
22701 && !get_AT (old_die, DW_AT_inline))
22702 {
22703 /* Detect and ignore this case, where we are trying to output
22704 something we have already output. */
22705 if (get_AT (old_die, DW_AT_low_pc)
22706 || get_AT (old_die, DW_AT_ranges))
22707 return;
22708
22709 /* If we have no location information, this must be a
22710 partially generated DIE from early dwarf generation.
22711 Fall through and generate it. */
22712 }
22713
22714 /* If the definition comes from the same place as the declaration,
22715 maybe use the old DIE. We always want the DIE for this function
22716 that has the *_pc attributes to be under comp_unit_die so the
22717 debugger can find it. We also need to do this for abstract
22718 instances of inlines, since the spec requires the out-of-line copy
22719 to have the same parent. For local class methods, this doesn't
22720 apply; we just use the old DIE. */
22721 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22722 struct dwarf_file_data * file_index = lookup_filename (s.file);
22723 if ((is_cu_die (old_die->die_parent)
22724 /* This condition fixes the inconsistency/ICE with the
22725 following Fortran test (or some derivative thereof) while
22726 building libgfortran:
22727
22728 module some_m
22729 contains
22730 logical function funky (FLAG)
22731 funky = .true.
22732 end function
22733 end module
22734 */
22735 || (old_die->die_parent
22736 && old_die->die_parent->die_tag == DW_TAG_module)
22737 || context_die == NULL)
22738 && (DECL_ARTIFICIAL (decl)
22739 /* The location attributes may be in the abstract origin
22740 which in the case of LTO might be not available to
22741 look at. */
22742 || get_AT (old_die, DW_AT_abstract_origin)
22743 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22744 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22745 == (unsigned) s.line)
22746 && (!debug_column_info
22747 || s.column == 0
22748 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22749 == (unsigned) s.column)))))
22750 {
22751 subr_die = old_die;
22752
22753 /* Clear out the declaration attribute, but leave the
22754 parameters so they can be augmented with location
22755 information later. Unless this was a declaration, in
22756 which case, wipe out the nameless parameters and recreate
22757 them further down. */
22758 if (remove_AT (subr_die, DW_AT_declaration))
22759 {
22761 remove_AT (subr_die, DW_AT_object_pointer);
22762 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22763 }
22764 }
22765 /* Make a specification pointing to the previously built
22766 declaration. */
22767 else
22768 {
22769 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22770 add_AT_specification (subr_die, old_die);
22771 add_pubname (decl, subr_die);
22772 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22773 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22774 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22775 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22776 if (debug_column_info
22777 && s.column
22778 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22779 != (unsigned) s.column))
22780 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22781
22782 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22783 emit the real type on the definition die. */
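	  /* Illustrative example (C++14 deduced return types):

	       auto f ();               // declaration: DW_AT_type is the "auto" DIE
	       auto f () { return 0; }  // definition: the deduced type is int

	     so the definition DIE gets the deduced type added below.  */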
22784 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22785 {
22786 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22787 if (die == auto_die || die == decltype_auto_die)
22788 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22789 TYPE_UNQUALIFIED, false, context_die);
22790 }
22791
22792 /* When we process the method declaration, we haven't seen
22793 the out-of-class defaulted definition yet, so we have to
22794 recheck now. */
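	  /* For example (illustrative):

	       struct S { S (); };
	       S::S () = default;   // DW_DEFAULTED_out_of_class

	     The in-class declaration could not have known this, hence the
	     recheck here.  */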
22795 if ((dwarf_version >= 5 || ! dwarf_strict)
22796 && !get_AT (subr_die, DW_AT_defaulted))
22797 {
22798 int defaulted
22799 = lang_hooks.decls.decl_dwarf_attribute (decl,
22800 DW_AT_defaulted);
22801 if (defaulted != -1)
22802 {
22803 /* Other values must have been handled before. */
22804 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22805 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22806 }
22807 }
22808 }
22809 }
22810 /* Create a fresh DIE for anything else. */
22811 else
22812 {
22813 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22814
22815 if (TREE_PUBLIC (decl))
22816 add_AT_flag (subr_die, DW_AT_external, 1);
22817
22818 add_name_and_src_coords_attributes (subr_die, decl);
22819 add_pubname (decl, subr_die);
22820 if (debug_info_level > DINFO_LEVEL_TERSE)
22821 {
22822 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22823 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22824 TYPE_UNQUALIFIED, false, context_die);
22825 }
22826
22827 add_pure_or_virtual_attribute (subr_die, decl);
22828 if (DECL_ARTIFICIAL (decl))
22829 add_AT_flag (subr_die, DW_AT_artificial, 1);
22830
22831 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22832 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22833
22834 add_alignment_attribute (subr_die, decl);
22835
22836 add_accessibility_attribute (subr_die, decl);
22837 }
22838
22839 /* Unless we have an existing non-declaration DIE, equate the new
22840 DIE. */
22841 if (!old_die || is_declaration_die (old_die))
22842 equate_decl_number_to_die (decl, subr_die);
22843
22844 if (declaration)
22845 {
22846 if (!old_die || !get_AT (old_die, DW_AT_inline))
22847 {
22848 add_AT_flag (subr_die, DW_AT_declaration, 1);
22849
22850 /* If this is an explicit function declaration then generate
22851 a DW_AT_explicit attribute. */
22852 if ((dwarf_version >= 3 || !dwarf_strict)
22853 && lang_hooks.decls.decl_dwarf_attribute (decl,
22854 DW_AT_explicit) == 1)
22855 add_AT_flag (subr_die, DW_AT_explicit, 1);
22856
22857 /* If this is a C++11 deleted special function member then generate
22858 a DW_AT_deleted attribute. */
22859 if ((dwarf_version >= 5 || !dwarf_strict)
22860 && lang_hooks.decls.decl_dwarf_attribute (decl,
22861 DW_AT_deleted) == 1)
22862 add_AT_flag (subr_die, DW_AT_deleted, 1);
22863
22864 /* If this is a C++11 defaulted special function member then
22865 generate a DW_AT_defaulted attribute. */
22866 if (dwarf_version >= 5 || !dwarf_strict)
22867 {
22868 int defaulted
22869 = lang_hooks.decls.decl_dwarf_attribute (decl,
22870 DW_AT_defaulted);
22871 if (defaulted != -1)
22872 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22873 }
22874
22875 /* If this is a C++11 non-static member function with & ref-qualifier
22876 then generate a DW_AT_reference attribute. */
22877 if ((dwarf_version >= 5 || !dwarf_strict)
22878 && lang_hooks.decls.decl_dwarf_attribute (decl,
22879 DW_AT_reference) == 1)
22880 add_AT_flag (subr_die, DW_AT_reference, 1);
22881
22882 /* If this is a C++11 non-static member function with &&
22883            ref-qualifier then generate a DW_AT_rvalue_reference attribute.  */
22884 if ((dwarf_version >= 5 || !dwarf_strict)
22885 && lang_hooks.decls.decl_dwarf_attribute (decl,
22886 DW_AT_rvalue_reference)
22887 == 1)
22888 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
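	  /* Taking the two ref-qualifier cases above together, e.g.
	     (illustrative):

	       struct S { void f () &;  void g () &&; };

	     S::f gets DW_AT_reference and S::g gets DW_AT_rvalue_reference.  */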
22889 }
22890 }
22891 /* For non DECL_EXTERNALs, if range information is available, fill
22892 the DIE with it. */
22893 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
22894 {
22895 HOST_WIDE_INT cfa_fb_offset;
22896
22897 struct function *fun = DECL_STRUCT_FUNCTION (decl);
22898
22899 if (!crtl->has_bb_partition)
22900 {
22901 dw_fde_ref fde = fun->fde;
22902 if (fde->dw_fde_begin)
22903 {
22904 /* We have already generated the labels. */
22905 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22906 fde->dw_fde_end, false);
22907 }
22908 else
22909 {
22910 /* Create start/end labels and add the range. */
22911 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
22912 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
22913 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
22914 current_function_funcdef_no);
22915 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
22916 current_function_funcdef_no);
22917 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
22918 false);
22919 }
22920
22921 #if VMS_DEBUGGING_INFO
22922 /* HP OpenVMS Industry Standard 64: DWARF Extensions
22923 Section 2.3 Prologue and Epilogue Attributes:
22924 When a breakpoint is set on entry to a function, it is generally
22925 desirable for execution to be suspended, not on the very first
22926 instruction of the function, but rather at a point after the
22927 function's frame has been set up, after any language defined local
22928 declaration processing has been completed, and before execution of
22929 the first statement of the function begins. Debuggers generally
22930 cannot properly determine where this point is. Similarly for a
22931 breakpoint set on exit from a function. The prologue and epilogue
22932 attributes allow a compiler to communicate the location(s) to use. */
22933
22934 {
22935 if (fde->dw_fde_vms_end_prologue)
22936 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
22937 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
22938
22939 if (fde->dw_fde_vms_begin_epilogue)
22940 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
22941 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
22942 }
22943 #endif
22944
22945 }
22946 else
22947 {
22948 /* Generate pubnames entries for the split function code ranges. */
22949 dw_fde_ref fde = fun->fde;
22950
22951 if (fde->dw_fde_second_begin)
22952 {
22953 if (dwarf_version >= 3 || !dwarf_strict)
22954 {
22955 /* We should use ranges for non-contiguous code section
22956 addresses. Use the actual code range for the initial
22957 section, since the HOT/COLD labels might precede an
22958 alignment offset. */
22959 bool range_list_added = false;
22960 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
22961 fde->dw_fde_end, &range_list_added,
22962 false);
22963 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
22964 fde->dw_fde_second_end,
22965 &range_list_added, false);
22966 if (range_list_added)
22967 add_ranges (NULL);
22968 }
22969 else
22970 {
22971                   /* There is no real support in DWARF 2 for this, so we make
22972 a work-around. First, emit the pub name for the segment
22973 containing the function label. Then make and emit a
22974 simplified subprogram DIE for the second segment with the
22975                      name prefixed by __second_sect_of_.  We use the same
22976 linkage name for the second die so that gdb will find both
22977 sections when given "b foo". */
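		  /* A rough sketch of the result for a split function foo
		     (illustrative): the primary DW_TAG_subprogram for "foo"
		     covers the hot range, and a second minimal
		     DW_TAG_subprogram named "__second_sect_of_foo" covers the
		     cold range; both carry foo's linkage name so "b foo"
		     matches either copy.  */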
22978 const char *name = NULL;
22979 tree decl_name = DECL_NAME (decl);
22980 dw_die_ref seg_die;
22981
22982 /* Do the 'primary' section. */
22983 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22984 fde->dw_fde_end, false);
22985
22986 /* Build a minimal DIE for the secondary section. */
22987 seg_die = new_die (DW_TAG_subprogram,
22988 subr_die->die_parent, decl);
22989
22990 if (TREE_PUBLIC (decl))
22991 add_AT_flag (seg_die, DW_AT_external, 1);
22992
22993 if (decl_name != NULL
22994 && IDENTIFIER_POINTER (decl_name) != NULL)
22995 {
22996 name = dwarf2_name (decl, 1);
22997 if (! DECL_ARTIFICIAL (decl))
22998 add_src_coords_attributes (seg_die, decl);
22999
23000 add_linkage_name (seg_die, decl);
23001 }
23002 gcc_assert (name != NULL);
23003 add_pure_or_virtual_attribute (seg_die, decl);
23004 if (DECL_ARTIFICIAL (decl))
23005 add_AT_flag (seg_die, DW_AT_artificial, 1);
23006
23007 name = concat ("__second_sect_of_", name, NULL);
23008 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23009 fde->dw_fde_second_end, false);
23010 add_name_attribute (seg_die, name);
23011 if (want_pubnames ())
23012 add_pubname_string (name, seg_die);
23013 }
23014 }
23015 else
23016 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23017 false);
23018 }
23019
23020 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23021
23022 /* We define the "frame base" as the function's CFA. This is more
23023 convenient for several reasons: (1) It's stable across the prologue
23024 and epilogue, which makes it better than just a frame pointer,
23025 (2) With dwarf3, there exists a one-byte encoding that allows us
23026 to reference the .debug_frame data by proxy, but failing that,
23027 (3) We can at least reuse the code inspection and interpretation
23028 code that determines the CFA position at various points in the
23029 function. */
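      /* A minimal sketch of the two alternatives handled below
	 (illustrative): with DWARF 3+ and DWARF2 unwind info we emit

	   DW_AT_frame_base: DW_OP_call_frame_cfa

	 and otherwise a location expression (or list) derived from the
	 CFA program by convert_cfa_to_fb_loc_list.  */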
23030 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23031 {
23032 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23033 add_AT_loc (subr_die, DW_AT_frame_base, op);
23034 }
23035 else
23036 {
23037 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23038 if (list->dw_loc_next)
23039 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23040 else
23041 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23042 }
23043
23044 /* Compute a displacement from the "steady-state frame pointer" to
23045 the CFA. The former is what all stack slots and argument slots
23046 will reference in the rtl; the latter is what we've told the
23047 debugger about. We'll need to adjust all frame_base references
23048 by this displacement. */
23049 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23050
23051 if (fun->static_chain_decl)
23052 {
23053 /* DWARF requires here a location expression that computes the
23054 address of the enclosing subprogram's frame base. The machinery
23055 in tree-nested.c is supposed to store this specific address in the
23056 last field of the FRAME record. */
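	  /* For instance (illustrative), given the GNU C nested function

	       int outer (void)
	       { int x;  int inner (void) { return x; }  ...  }

	     the DIE for "inner" gets a DW_AT_static_link expression that
	     reads the enclosing frame base back out of that last field.  */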
23057 const tree frame_type
23058 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23059 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23060
23061 tree fb_expr
23062 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23063 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23064 fb_expr, fb_decl, NULL_TREE);
23065
23066 add_AT_location_description (subr_die, DW_AT_static_link,
23067 loc_list_from_tree (fb_expr, 0, NULL));
23068 }
23069
23070 resolve_variable_values ();
23071 }
23072
23073   /* Generate child DIEs for template parameters.  */
23074 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23075 gen_generic_params_dies (decl);
23076
23077 /* Now output descriptions of the arguments for this function. This gets
23078      (unnecessarily?) complex because the DECL_ARGUMENTS list
23079 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23080 `...' at the end of the formal parameter list. In order to find out if
23081 there was a trailing ellipsis or not, we must instead look at the type
23082 associated with the FUNCTION_DECL. This will be a node of type
23083 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23084 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23085 an ellipsis at the end. */
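  /* E.g. (illustrative):

       int f (int a, ...);  -- argument type list does not end in void
       int g (int a);       -- argument type list terminated by void
       int h ();            -- unprototyped, no argument type list at all  */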
23086
23087 /* In the case where we are describing a mere function declaration, all we
23088 need to do here (and all we *can* do here) is to describe the *types* of
23089 its formal parameters. */
23090 if (debug_info_level <= DINFO_LEVEL_TERSE)
23091 ;
23092 else if (declaration)
23093 gen_formal_types_die (decl, subr_die);
23094 else
23095 {
23096 /* Generate DIEs to represent all known formal parameters. */
23097 tree parm = DECL_ARGUMENTS (decl);
23098 tree generic_decl = early_dwarf
23099 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23100 tree generic_decl_parm = generic_decl
23101 ? DECL_ARGUMENTS (generic_decl)
23102 : NULL;
23103
23104 /* Now we want to walk the list of parameters of the function and
23105 emit their relevant DIEs.
23106
23107 We consider the case of DECL being an instance of a generic function
23108 as well as it being a normal function.
23109
23110 If DECL is an instance of a generic function we walk the
23111 parameters of the generic function declaration _and_ the parameters of
23112 DECL itself. This is useful because we want to emit specific DIEs for
23113 function parameter packs and those are declared as part of the
23114 generic function declaration. In that particular case,
23115 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23116 That DIE has children DIEs representing the set of arguments
23117 of the pack. Note that the set of pack arguments can be empty.
23118 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23119         child DIEs.
23120
23121 Otherwise, we just consider the parameters of DECL. */
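      /* For instance (illustrative), with

	   template <typename... T> void f (T... args);

	 an instantiation such as f<int, long> gets, in early dwarf, a
	 DW_TAG_GNU_formal_parameter_pack DIE for "args" whose children
	 describe the int and long arguments.  */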
23122 while (generic_decl_parm || parm)
23123 {
23124 if (generic_decl_parm
23125 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23126 gen_formal_parameter_pack_die (generic_decl_parm,
23127 parm, subr_die,
23128 &parm);
23129 else if (parm && !POINTER_BOUNDS_P (parm))
23130 {
23131 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23132
23133 if (early_dwarf
23134 && parm == DECL_ARGUMENTS (decl)
23135 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23136 && parm_die
23137 && (dwarf_version >= 3 || !dwarf_strict))
23138 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23139
23140 parm = DECL_CHAIN (parm);
23141 }
23142 else if (parm)
23143 parm = DECL_CHAIN (parm);
23144
23145 if (generic_decl_parm)
23146 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23147 }
23148
23149 /* Decide whether we need an unspecified_parameters DIE at the end.
23150          There are two cases to do this for: 1) the ANSI ... declaration -
23151 this is detectable when the end of the arg list is not a
23152 void_type_node 2) an unprototyped function declaration (not a
23153 definition). This just means that we have no info about the
23154 parameters at all. */
23155 if (early_dwarf)
23156 {
23157 if (prototype_p (TREE_TYPE (decl)))
23158 {
23159               /* This is the prototyped case; check for a trailing ellipsis.  */
23160 if (stdarg_p (TREE_TYPE (decl)))
23161 gen_unspecified_parameters_die (decl, subr_die);
23162 }
23163 else if (DECL_INITIAL (decl) == NULL_TREE)
23164 gen_unspecified_parameters_die (decl, subr_die);
23165 }
23166 }
23167
23168 if (subr_die != old_die)
23169 /* Add the calling convention attribute if requested. */
23170 add_calling_convention_attribute (subr_die, decl);
23171
23172 /* Output Dwarf info for all of the stuff within the body of the function
23173 (if it has one - it may be just a declaration).
23174
23175 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23176 a function. This BLOCK actually represents the outermost binding contour
23177 for the function, i.e. the contour in which the function's formal
23178 parameters and labels get declared. Curiously, it appears that the front
23179 end doesn't actually put the PARM_DECL nodes for the current function onto
23180      the BLOCK_VARS list for this outer scope; instead they are strung off
23181      the DECL_ARGUMENTS list for the function.
23182
23183 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23184 the LABEL_DECL nodes for the function however, and we output DWARF info
23185 for those in decls_for_scope. Just within the `outer_scope' there will be
23186 a BLOCK node representing the function's outermost pair of curly braces,
23187 and any blocks used for the base and member initializers of a C++
23188 constructor function. */
23189 tree outer_scope = DECL_INITIAL (decl);
23190 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23191 {
23192 int call_site_note_count = 0;
23193 int tail_call_site_note_count = 0;
23194
23195 /* Emit a DW_TAG_variable DIE for a named return value. */
23196 if (DECL_NAME (DECL_RESULT (decl)))
23197 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23198
23199 /* The first time through decls_for_scope we will generate the
23200 DIEs for the locals. The second time, we fill in the
23201 location info. */
23202 decls_for_scope (outer_scope, subr_die);
23203
23204 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23205 {
23206 struct call_arg_loc_node *ca_loc;
23207 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23208 {
23209 dw_die_ref die = NULL;
23210 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23211 rtx arg, next_arg;
23212
23213 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23214 ? XEXP (ca_loc->call_arg_loc_note, 0)
23215 : NULL_RTX);
23216 arg; arg = next_arg)
23217 {
23218 dw_loc_descr_ref reg, val;
23219 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23220 dw_die_ref cdie, tdie = NULL;
23221
23222 next_arg = XEXP (arg, 1);
23223 if (REG_P (XEXP (XEXP (arg, 0), 0))
23224 && next_arg
23225 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23226 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23227 && REGNO (XEXP (XEXP (arg, 0), 0))
23228 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23229 next_arg = XEXP (next_arg, 1);
23230 if (mode == VOIDmode)
23231 {
23232 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23233 if (mode == VOIDmode)
23234 mode = GET_MODE (XEXP (arg, 0));
23235 }
23236 if (mode == VOIDmode || mode == BLKmode)
23237 continue;
23238 /* Get dynamic information about call target only if we
23239 have no static information: we cannot generate both
23240 DW_AT_call_origin and DW_AT_call_target
23241 attributes. */
23242 if (ca_loc->symbol_ref == NULL_RTX)
23243 {
23244 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23245 {
23246 tloc = XEXP (XEXP (arg, 0), 1);
23247 continue;
23248 }
23249 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23250 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23251 {
23252 tlocc = XEXP (XEXP (arg, 0), 1);
23253 continue;
23254 }
23255 }
23256 reg = NULL;
23257 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23258 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23259 VAR_INIT_STATUS_INITIALIZED);
23260 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23261 {
23262 rtx mem = XEXP (XEXP (arg, 0), 0);
23263 reg = mem_loc_descriptor (XEXP (mem, 0),
23264 get_address_mode (mem),
23265 GET_MODE (mem),
23266 VAR_INIT_STATUS_INITIALIZED);
23267 }
23268 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23269 == DEBUG_PARAMETER_REF)
23270 {
23271 tree tdecl
23272 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23273 tdie = lookup_decl_die (tdecl);
23274 if (tdie == NULL)
23275 continue;
23276 }
23277 else
23278 continue;
23279 if (reg == NULL
23280 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23281 != DEBUG_PARAMETER_REF)
23282 continue;
23283 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23284 VOIDmode,
23285 VAR_INIT_STATUS_INITIALIZED);
23286 if (val == NULL)
23287 continue;
23288 if (die == NULL)
23289 die = gen_call_site_die (decl, subr_die, ca_loc);
23290 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23291 NULL_TREE);
23292 if (reg != NULL)
23293 add_AT_loc (cdie, DW_AT_location, reg);
23294 else if (tdie != NULL)
23295 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23296 tdie);
23297 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23298 if (next_arg != XEXP (arg, 1))
23299 {
23300 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23301 if (mode == VOIDmode)
23302 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23303 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23304 0), 1),
23305 mode, VOIDmode,
23306 VAR_INIT_STATUS_INITIALIZED);
23307 if (val != NULL)
23308 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23309 val);
23310 }
23311 }
23312 if (die == NULL
23313 && (ca_loc->symbol_ref || tloc))
23314 die = gen_call_site_die (decl, subr_die, ca_loc);
23315 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23316 {
23317 dw_loc_descr_ref tval = NULL;
23318
23319 if (tloc != NULL_RTX)
23320 tval = mem_loc_descriptor (tloc,
23321 GET_MODE (tloc) == VOIDmode
23322 ? Pmode : GET_MODE (tloc),
23323 VOIDmode,
23324 VAR_INIT_STATUS_INITIALIZED);
23325 if (tval)
23326 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23327 else if (tlocc != NULL_RTX)
23328 {
23329 tval = mem_loc_descriptor (tlocc,
23330 GET_MODE (tlocc) == VOIDmode
23331 ? Pmode : GET_MODE (tlocc),
23332 VOIDmode,
23333 VAR_INIT_STATUS_INITIALIZED);
23334 if (tval)
23335 add_AT_loc (die,
23336 dwarf_AT (DW_AT_call_target_clobbered),
23337 tval);
23338 }
23339 }
23340 if (die != NULL)
23341 {
23342 call_site_note_count++;
23343 if (ca_loc->tail_call_p)
23344 tail_call_site_note_count++;
23345 }
23346 }
23347 }
23348 call_arg_locations = NULL;
23349 call_arg_loc_last = NULL;
23350 if (tail_call_site_count >= 0
23351 && tail_call_site_count == tail_call_site_note_count
23352 && (!dwarf_strict || dwarf_version >= 5))
23353 {
23354 if (call_site_count >= 0
23355 && call_site_count == call_site_note_count)
23356 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23357 else
23358 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23359 }
23360 call_site_count = -1;
23361 tail_call_site_count = -1;
23362 }
23363
23364 /* Mark used types after we have created DIEs for the functions scopes. */
23365 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23366 }
23367
23368 /* Returns a hash value for X (which really is a die_struct). */
23369
23370 hashval_t
23371 block_die_hasher::hash (die_struct *d)
23372 {
23373 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23374 }
23375
23376 /* Return nonzero if the decl_id and die_parent of die_struct X are the same
23377 as decl_id and die_parent of die_struct Y. */
23378
23379 bool
23380 block_die_hasher::equal (die_struct *x, die_struct *y)
23381 {
23382 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23383 }
23384
23385 /* Hold information about markers for inlined entry points. */
23386 struct GTY ((for_user)) inline_entry_data
23387 {
23388 /* The block that's the inlined_function_outer_scope for an inlined
23389 function. */
23390 tree block;
23391
23392 /* The label at the inlined entry point. */
23393 const char *label_pfx;
23394 unsigned int label_num;
23395
23396 /* The view number to be used as the inlined entry point. */
23397 var_loc_view view;
23398 };
23399
23400 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23401 {
23402 typedef tree compare_type;
23403 static inline hashval_t hash (const inline_entry_data *);
23404 static inline bool equal (const inline_entry_data *, const_tree);
23405 };
23406
23407 /* Hash table routines for inline_entry_data. */
23408
23409 inline hashval_t
23410 inline_entry_data_hasher::hash (const inline_entry_data *data)
23411 {
23412 return htab_hash_pointer (data->block);
23413 }
23414
23415 inline bool
23416 inline_entry_data_hasher::equal (const inline_entry_data *data,
23417 const_tree block)
23418 {
23419 return data->block == block;
23420 }
23421
23422 /* Inlined entry points pending DIE creation in this compilation unit. */
23423
23424 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23425
23426
23427 /* Return TRUE if DECL, which may have been previously generated as
23428 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23429 true if decl (or its origin) is either an extern declaration or a
23430 class/namespace scoped declaration.
23431
23432 The declare_in_namespace support causes us to get two DIEs for one
23433 variable, both of which are declarations. We want to avoid
23434 considering one to be a specification, so we must test for
23435 DECLARATION and DW_AT_declaration. */
23436 static inline bool
23437 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23438 {
23439 return (old_die && TREE_STATIC (decl) && !declaration
23440 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23441 }
23442
23443 /* Return true if DECL is a local static. */
23444
23445 static inline bool
23446 local_function_static (tree decl)
23447 {
23448 gcc_assert (VAR_P (decl));
23449 return TREE_STATIC (decl)
23450 && DECL_CONTEXT (decl)
23451 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23452 }
23453
23454 /* Generate a DIE to represent a declared data object.
23455 Either DECL or ORIGIN must be non-null. */
23456
23457 static void
23458 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23459 {
23460 HOST_WIDE_INT off = 0;
23461 tree com_decl;
23462 tree decl_or_origin = decl ? decl : origin;
23463 tree ultimate_origin;
23464 dw_die_ref var_die;
23465 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23466 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23467 || class_or_namespace_scope_p (context_die));
23468 bool specialization_p = false;
23469 bool no_linkage_name = false;
23470
23471 /* While C++ inline static data members have definitions inside of the
23472 class, force the first DIE to be a declaration, then let gen_member_die
23473 reparent it to the class context and call gen_variable_die again
23474 to create the outside of the class DIE for the definition. */
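  /* E.g. (illustrative):

       struct S { static inline int x = 1; };

     first produces a declaration DIE for "x" inside S; the defining
     DW_TAG_variable, with a DW_AT_specification back to it, is created
     outside the class on a later call.  */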
23475 if (!declaration
23476 && old_die == NULL
23477 && decl
23478 && DECL_CONTEXT (decl)
23479 && TYPE_P (DECL_CONTEXT (decl))
23480 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23481 {
23482 declaration = true;
23483 if (dwarf_version < 5)
23484 no_linkage_name = true;
23485 }
23486
23487 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23488 if (decl || ultimate_origin)
23489 origin = ultimate_origin;
23490 com_decl = fortran_common (decl_or_origin, &off);
23491
23492 /* Symbol in common gets emitted as a child of the common block, in the form
23493 of a data member. */
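  /* For instance (illustrative), Fortran code like

       COMMON /blk/ a, b

     yields a DW_TAG_common_block DIE for "blk" with DW_TAG_variable
     children for "a" and "b", each located at the block's address plus
     the member's offset.  */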
23494 if (com_decl)
23495 {
23496 dw_die_ref com_die;
23497 dw_loc_list_ref loc = NULL;
23498 die_node com_die_arg;
23499
23500 var_die = lookup_decl_die (decl_or_origin);
23501 if (var_die)
23502 {
23503 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23504 {
23505 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23506 if (loc)
23507 {
23508 if (off)
23509 {
23510 /* Optimize the common case. */
23511 if (single_element_loc_list_p (loc)
23512 && loc->expr->dw_loc_opc == DW_OP_addr
23513 && loc->expr->dw_loc_next == NULL
23514 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23515 == SYMBOL_REF)
23516 {
23517 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23518 loc->expr->dw_loc_oprnd1.v.val_addr
23519 = plus_constant (GET_MODE (x), x , off);
23520 }
23521 else
23522 loc_list_plus_const (loc, off);
23523 }
23524 add_AT_location_description (var_die, DW_AT_location, loc);
23525 remove_AT (var_die, DW_AT_declaration);
23526 }
23527 }
23528 return;
23529 }
23530
23531 if (common_block_die_table == NULL)
23532 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23533
23534 com_die_arg.decl_id = DECL_UID (com_decl);
23535 com_die_arg.die_parent = context_die;
23536 com_die = common_block_die_table->find (&com_die_arg);
23537 if (! early_dwarf)
23538 loc = loc_list_from_tree (com_decl, 2, NULL);
23539 if (com_die == NULL)
23540 {
23541 const char *cnam
23542 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23543 die_node **slot;
23544
23545 com_die = new_die (DW_TAG_common_block, context_die, decl);
23546 add_name_and_src_coords_attributes (com_die, com_decl);
23547 if (loc)
23548 {
23549 add_AT_location_description (com_die, DW_AT_location, loc);
23550 /* Avoid sharing the same loc descriptor between
23551 DW_TAG_common_block and DW_TAG_variable. */
23552 loc = loc_list_from_tree (com_decl, 2, NULL);
23553 }
23554 else if (DECL_EXTERNAL (decl_or_origin))
23555 add_AT_flag (com_die, DW_AT_declaration, 1);
23556 if (want_pubnames ())
23557 add_pubname_string (cnam, com_die); /* ??? needed? */
23558 com_die->decl_id = DECL_UID (com_decl);
23559 slot = common_block_die_table->find_slot (com_die, INSERT);
23560 *slot = com_die;
23561 }
23562 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23563 {
23564 add_AT_location_description (com_die, DW_AT_location, loc);
23565 loc = loc_list_from_tree (com_decl, 2, NULL);
23566 remove_AT (com_die, DW_AT_declaration);
23567 }
23568 var_die = new_die (DW_TAG_variable, com_die, decl);
23569 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23570 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23571 decl_quals (decl_or_origin), false,
23572 context_die);
23573 add_alignment_attribute (var_die, decl);
23574 add_AT_flag (var_die, DW_AT_external, 1);
23575 if (loc)
23576 {
23577 if (off)
23578 {
23579 /* Optimize the common case. */
23580 if (single_element_loc_list_p (loc)
23581 && loc->expr->dw_loc_opc == DW_OP_addr
23582 && loc->expr->dw_loc_next == NULL
23583 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23584 {
23585 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23586 loc->expr->dw_loc_oprnd1.v.val_addr
23587 = plus_constant (GET_MODE (x), x, off);
23588 }
23589 else
23590 loc_list_plus_const (loc, off);
23591 }
23592 add_AT_location_description (var_die, DW_AT_location, loc);
23593 }
23594 else if (DECL_EXTERNAL (decl_or_origin))
23595 add_AT_flag (var_die, DW_AT_declaration, 1);
23596 if (decl)
23597 equate_decl_number_to_die (decl, var_die);
23598 return;
23599 }
23600
23601 if (old_die)
23602 {
23603 if (declaration)
23604 {
23605           /* A declaration that has been previously dumped needs no
23606 further annotations, since it doesn't need location on
23607 the second pass. */
23608 return;
23609 }
23610 else if (decl_will_get_specification_p (old_die, decl, declaration)
23611 && !get_AT (old_die, DW_AT_specification))
23612 {
23613 /* Fall-thru so we can make a new variable die along with a
23614 DW_AT_specification. */
23615 }
23616 else if (origin && old_die->die_parent != context_die)
23617 {
23618 /* If we will be creating an inlined instance, we need a
23619 new DIE that will get annotated with
23620 DW_AT_abstract_origin. */
23621 gcc_assert (!DECL_ABSTRACT_P (decl));
23622 }
23623 else
23624 {
23625 /* If a DIE was dumped early, it still needs location info.
23626 Skip to where we fill the location bits. */
23627 var_die = old_die;
23628
23629 /* ??? In LTRANS we cannot annotate early created variably
23630 modified type DIEs without copying them and adjusting all
23631 references to them. Thus we dumped them again. Also add a
23632 reference to them but beware of -g0 compile and -g link
23633 in which case the reference will be already present. */
23634 tree type = TREE_TYPE (decl_or_origin);
23635 if (in_lto_p
23636 && ! get_AT (var_die, DW_AT_type)
23637 && variably_modified_type_p
23638 (type, decl_function_context (decl_or_origin)))
23639 {
23640 if (decl_by_reference_p (decl_or_origin))
23641 add_type_attribute (var_die, TREE_TYPE (type),
23642 TYPE_UNQUALIFIED, false, context_die);
23643 else
23644 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23645 false, context_die);
23646 }
23647
23648 goto gen_variable_die_location;
23649 }
23650 }
23651
23652 /* For static data members, the declaration in the class is supposed
23653 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
23654 also in DWARF2; the specification should still be DW_TAG_variable
23655 referencing the DW_TAG_member DIE. */
23656 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23657 var_die = new_die (DW_TAG_member, context_die, decl);
23658 else
23659 var_die = new_die (DW_TAG_variable, context_die, decl);
23660
23661 if (origin != NULL)
23662 add_abstract_origin_attribute (var_die, origin);
23663
23664 /* Loop unrolling can create multiple blocks that refer to the same
23665 static variable, so we must test for the DW_AT_declaration flag.
23666
23667 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23668 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23669 sharing them.
23670
23671 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23672 else if (decl_will_get_specification_p (old_die, decl, declaration))
23673 {
23674 /* This is a definition of a C++ class level static. */
23675 add_AT_specification (var_die, old_die);
23676 specialization_p = true;
23677 if (DECL_NAME (decl))
23678 {
23679 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23680 struct dwarf_file_data * file_index = lookup_filename (s.file);
23681
23682 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23683 add_AT_file (var_die, DW_AT_decl_file, file_index);
23684
23685 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23686 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23687
23688 if (debug_column_info
23689 && s.column
23690 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23691 != (unsigned) s.column))
23692 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23693
23694 if (old_die->die_tag == DW_TAG_member)
23695 add_linkage_name (var_die, decl);
23696 }
23697 }
23698 else
23699 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23700
23701 if ((origin == NULL && !specialization_p)
23702 || (origin != NULL
23703 && !DECL_ABSTRACT_P (decl_or_origin)
23704 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23705 decl_function_context
23706 (decl_or_origin))))
23707 {
23708 tree type = TREE_TYPE (decl_or_origin);
23709
23710 if (decl_by_reference_p (decl_or_origin))
23711 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23712 context_die);
23713 else
23714 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23715 context_die);
23716 }
23717
23718 if (origin == NULL && !specialization_p)
23719 {
23720 if (TREE_PUBLIC (decl))
23721 add_AT_flag (var_die, DW_AT_external, 1);
23722
23723 if (DECL_ARTIFICIAL (decl))
23724 add_AT_flag (var_die, DW_AT_artificial, 1);
23725
23726 add_alignment_attribute (var_die, decl);
23727
23728 add_accessibility_attribute (var_die, decl);
23729 }
23730
23731 if (declaration)
23732 add_AT_flag (var_die, DW_AT_declaration, 1);
23733
23734 if (decl && (DECL_ABSTRACT_P (decl)
23735 || !old_die || is_declaration_die (old_die)))
23736 equate_decl_number_to_die (decl, var_die);
23737
23738 gen_variable_die_location:
23739 if (! declaration
23740 && (! DECL_ABSTRACT_P (decl_or_origin)
23741 /* Local static vars are shared between all clones/inlines,
23742 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23743 already set. */
23744 || (VAR_P (decl_or_origin)
23745 && TREE_STATIC (decl_or_origin)
23746 && DECL_RTL_SET_P (decl_or_origin))))
23747 {
23748 if (early_dwarf)
23749 add_pubname (decl_or_origin, var_die);
23750 else
23751 add_location_or_const_value_attribute (var_die, decl_or_origin,
23752 decl == NULL);
23753 }
23754 else
23755 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23756
23757 if ((dwarf_version >= 4 || !dwarf_strict)
23758 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23759 DW_AT_const_expr) == 1
23760 && !get_AT (var_die, DW_AT_const_expr)
23761 && !specialization_p)
23762 add_AT_flag (var_die, DW_AT_const_expr, 1);
23763
23764 if (!dwarf_strict)
23765 {
23766 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23767 DW_AT_inline);
23768 if (inl != -1
23769 && !get_AT (var_die, DW_AT_inline)
23770 && !specialization_p)
23771 add_AT_unsigned (var_die, DW_AT_inline, inl);
23772 }
23773 }
23774
23775 /* Generate a DIE to represent a named constant. */
23776
23777 static void
23778 gen_const_die (tree decl, dw_die_ref context_die)
23779 {
23780 dw_die_ref const_die;
23781 tree type = TREE_TYPE (decl);
23782
23783 const_die = lookup_decl_die (decl);
23784 if (const_die)
23785 return;
23786
23787 const_die = new_die (DW_TAG_constant, context_die, decl);
23788 equate_decl_number_to_die (decl, const_die);
23789 add_name_and_src_coords_attributes (const_die, decl);
23790 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23791 if (TREE_PUBLIC (decl))
23792 add_AT_flag (const_die, DW_AT_external, 1);
23793 if (DECL_ARTIFICIAL (decl))
23794 add_AT_flag (const_die, DW_AT_artificial, 1);
23795 tree_add_const_value_attribute_for_decl (const_die, decl);
23796 }
23797
23798 /* Generate a DIE to represent a label identifier. */
23799
23800 static void
23801 gen_label_die (tree decl, dw_die_ref context_die)
23802 {
23803 tree origin = decl_ultimate_origin (decl);
23804 dw_die_ref lbl_die = lookup_decl_die (decl);
23805 rtx insn;
23806 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23807
23808 if (!lbl_die)
23809 {
23810 lbl_die = new_die (DW_TAG_label, context_die, decl);
23811 equate_decl_number_to_die (decl, lbl_die);
23812
23813 if (origin != NULL)
23814 add_abstract_origin_attribute (lbl_die, origin);
23815 else
23816 add_name_and_src_coords_attributes (lbl_die, decl);
23817 }
23818
23819 if (DECL_ABSTRACT_P (decl))
23820 equate_decl_number_to_die (decl, lbl_die);
23821 else if (! early_dwarf)
23822 {
23823 insn = DECL_RTL_IF_SET (decl);
23824
23825 /* Deleted labels are programmer specified labels which have been
23826 eliminated because of various optimizations. We still emit them
23827 here so that it is possible to put breakpoints on them. */
23828 if (insn
23829 && (LABEL_P (insn)
23830 || ((NOTE_P (insn)
23831 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23832 {
23833 /* When optimization is enabled (via -O) some parts of the compiler
23834 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23835 represent source-level labels which were explicitly declared by
23836 the user. This really shouldn't be happening though, so catch
23837 it if it ever does happen. */
23838 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23839
23840 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23841 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23842 }
23843 else if (insn
23844 && NOTE_P (insn)
23845 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23846 && CODE_LABEL_NUMBER (insn) != -1)
23847 {
23848 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23849 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23850 }
23851 }
23852 }
23853
23854 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23855 attributes to the DIE for a block STMT, to describe where the inlined
23856 function was called from. This is similar to add_src_coords_attributes. */
23857
23858 static inline void
23859 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23860 {
23861 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23862
23863 if (dwarf_version >= 3 || !dwarf_strict)
23864 {
23865 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23866 add_AT_unsigned (die, DW_AT_call_line, s.line);
23867 if (debug_column_info && s.column)
23868 add_AT_unsigned (die, DW_AT_call_column, s.column);
23869 }
23870 }
23871
23872
23873 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
23874 Add low_pc and high_pc attributes to the DIE for a block STMT. */
23875
23876 static inline void
23877 add_high_low_attributes (tree stmt, dw_die_ref die)
23878 {
23879 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23880
23881 if (inline_entry_data **iedp
23882 = !inline_entry_data_table ? NULL
23883 : inline_entry_data_table->find_slot_with_hash (stmt,
23884 htab_hash_pointer (stmt),
23885 NO_INSERT))
23886 {
23887 inline_entry_data *ied = *iedp;
23888 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
23889 gcc_assert (debug_inline_points);
23890 gcc_assert (inlined_function_outer_scope_p (stmt));
23891
23892 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
23893 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23894
23895 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
23896 && !dwarf_strict)
23897 {
23898 if (!output_asm_line_debug_info ())
23899 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
23900 else
23901 {
23902 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
23903 /* FIXME: this will resolve to a small number. Could we
23904 possibly emit smaller data? Ideally we'd emit a
23905 uleb128, but that would make the size of DIEs
23906 impossible for the compiler to compute, since it's
23907 the assembler that computes the value of the view
23908 label in this case. Ideally, we'd have a single form
23909 encompassing both the address and the view, and
23910 indirecting them through a table might make things
23911 easier, but even that would be more wasteful,
23912 space-wise, than what we have now. */
23913 add_AT_symview (die, DW_AT_GNU_entry_view, label);
23914 }
23915 }
23916
23917 inline_entry_data_table->clear_slot (iedp);
23918 }
23919
23920 if (BLOCK_FRAGMENT_CHAIN (stmt)
23921 && (dwarf_version >= 3 || !dwarf_strict))
23922 {
23923 tree chain, superblock = NULL_TREE;
23924 dw_die_ref pdie;
23925 dw_attr_node *attr = NULL;
23926
23927 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
23928 {
23929 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23930 BLOCK_NUMBER (stmt));
23931 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23932 }
23933
23934 /* Optimize duplicate .debug_ranges lists or even tails of
23935 lists. If this BLOCK has same ranges as its supercontext,
23936 lookup DW_AT_ranges attribute in the supercontext (and
23937 recursively so), verify that the ranges_table contains the
23938 right values and use it instead of adding a new .debug_range. */
23939 for (chain = stmt, pdie = die;
23940 BLOCK_SAME_RANGE (chain);
23941 chain = BLOCK_SUPERCONTEXT (chain))
23942 {
23943 dw_attr_node *new_attr;
23944
23945 pdie = pdie->die_parent;
23946 if (pdie == NULL)
23947 break;
23948 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
23949 break;
23950 new_attr = get_AT (pdie, DW_AT_ranges);
23951 if (new_attr == NULL
23952 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
23953 break;
23954 attr = new_attr;
23955 superblock = BLOCK_SUPERCONTEXT (chain);
23956 }
23957 if (attr != NULL
23958 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
23959 == BLOCK_NUMBER (superblock))
23960 && BLOCK_FRAGMENT_CHAIN (superblock))
23961 {
23962 unsigned long off = attr->dw_attr_val.v.val_offset;
23963 unsigned long supercnt = 0, thiscnt = 0;
23964 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
23965 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
23966 {
23967 ++supercnt;
23968 gcc_checking_assert ((*ranges_table)[off + supercnt].num
23969 == BLOCK_NUMBER (chain));
23970 }
23971 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
23972 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
23973 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
23974 ++thiscnt;
23975 gcc_assert (supercnt >= thiscnt);
23976 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
23977 false);
23978 note_rnglist_head (off + supercnt - thiscnt);
23979 return;
23980 }
23981
23982 unsigned int offset = add_ranges (stmt, true);
23983 add_AT_range_list (die, DW_AT_ranges, offset, false);
23984 note_rnglist_head (offset);
23985
23986 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
23987 chain = BLOCK_FRAGMENT_CHAIN (stmt);
23988 do
23989 {
23990 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
23991 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
23992 chain = BLOCK_FRAGMENT_CHAIN (chain);
23993 }
23994 while (chain);
23995 add_ranges (NULL);
23996 }
23997 else
23998 {
23999 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24000 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24001 BLOCK_NUMBER (stmt));
24002 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24003 BLOCK_NUMBER (stmt));
24004 add_AT_low_high_pc (die, label, label_high, false);
24005 }
24006 }
24007
24008 /* Generate a DIE for a lexical block. */
24009
24010 static void
24011 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24012 {
24013 dw_die_ref old_die = BLOCK_DIE (stmt);
24014 dw_die_ref stmt_die = NULL;
24015 if (!old_die)
24016 {
24017 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24018 BLOCK_DIE (stmt) = stmt_die;
24019 }
24020
24021 if (BLOCK_ABSTRACT (stmt))
24022 {
24023 if (old_die)
24024 {
24025 /* This must have been generated early and it won't even
24026 need location information since it's a DW_AT_inline
24027 function. */
24028 if (flag_checking)
24029 for (dw_die_ref c = context_die; c; c = c->die_parent)
24030 if (c->die_tag == DW_TAG_inlined_subroutine
24031 || c->die_tag == DW_TAG_subprogram)
24032 {
24033 gcc_assert (get_AT (c, DW_AT_inline));
24034 break;
24035 }
24036 return;
24037 }
24038 }
24039 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
24040 {
24041 /* If this is an inlined instance, create a new lexical block DIE for
24042 anything below to attach DW_AT_abstract_origin to. */
24043 if (old_die)
24044 {
24045 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24046 BLOCK_DIE (stmt) = stmt_die;
24047 old_die = NULL;
24048 }
24049
24050 tree origin = block_ultimate_origin (stmt);
24051 if (origin != NULL_TREE && origin != stmt)
24052 add_abstract_origin_attribute (stmt_die, origin);
24053 }
24054
24055 if (old_die)
24056 stmt_die = old_die;
24057
24058 /* A non-abstract block whose blocks have already been reordered
24059 should have the instruction range for this block. If so, set the
24060 high/low attributes. */
24061 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
24062 {
24063 gcc_assert (stmt_die);
24064 add_high_low_attributes (stmt, stmt_die);
24065 }
24066
24067 decls_for_scope (stmt, stmt_die);
24068 }
24069
24070 /* Generate a DIE for an inlined subprogram. */
24071
24072 static void
24073 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24074 {
24075 tree decl;
24076
24077 /* The instance of the function that is effectively being inlined shall not
24078 be abstract. */
24079 gcc_assert (! BLOCK_ABSTRACT (stmt));
24080
24081 decl = block_ultimate_origin (stmt);
24082
24083 /* Make sure any inlined functions are known to be inlineable. */
24084 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24085 || cgraph_function_possibly_inlined_p (decl));
24086
24087 if (! BLOCK_ABSTRACT (stmt))
24088 {
24089 dw_die_ref subr_die
24090 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24091
24092 if (call_arg_locations || debug_inline_points)
24093 BLOCK_DIE (stmt) = subr_die;
24094 add_abstract_origin_attribute (subr_die, decl);
24095 if (TREE_ASM_WRITTEN (stmt))
24096 add_high_low_attributes (stmt, subr_die);
24097 add_call_src_coords_attributes (stmt, subr_die);
24098
24099 decls_for_scope (stmt, subr_die);
24100 }
24101 }
24102
24103 /* Generate a DIE for a field in a record or structure. CTX is required: see
24104 the comment for VLR_CONTEXT. */
24105
24106 static void
24107 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24108 {
24109 dw_die_ref decl_die;
24110
24111 if (TREE_TYPE (decl) == error_mark_node)
24112 return;
24113
24114 decl_die = new_die (DW_TAG_member, context_die, decl);
24115 add_name_and_src_coords_attributes (decl_die, decl);
24116 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24117 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24118 context_die);
24119
24120 if (DECL_BIT_FIELD_TYPE (decl))
24121 {
24122 add_byte_size_attribute (decl_die, decl);
24123 add_bit_size_attribute (decl_die, decl);
24124 add_bit_offset_attribute (decl_die, decl, ctx);
24125 }
24126
24127 add_alignment_attribute (decl_die, decl);
24128
24129 /* If we have a variant part offset, then we are supposed to process a member
24130 of a QUAL_UNION_TYPE, which is how we represent variant parts in
24131 trees. */
24132 gcc_assert (ctx->variant_part_offset == NULL_TREE
24133 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24134 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24135 add_data_member_location_attribute (decl_die, decl, ctx);
24136
24137 if (DECL_ARTIFICIAL (decl))
24138 add_AT_flag (decl_die, DW_AT_artificial, 1);
24139
24140 add_accessibility_attribute (decl_die, decl);
24141
24142 /* Equate decl number to die, so that we can look up this decl later on. */
24143 equate_decl_number_to_die (decl, decl_die);
24144 }
24145
24146 /* Generate a DIE for a pointer to a member type. TYPE can be an
24147 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24148 pointer to member function. */
24149
24150 static void
24151 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24152 {
24153 if (lookup_type_die (type))
24154 return;
24155
24156 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24157 scope_die_for (type, context_die), type);
24158
24159 equate_type_number_to_die (type, ptr_die);
24160 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24161 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24162 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24163 context_die);
24164 add_alignment_attribute (ptr_die, type);
24165
24166 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24167 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24168 {
24169 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24170 add_AT_loc (ptr_die, DW_AT_use_location, op);
24171 }
24172 }
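/* Illustrative note (hypothetical C++ source, not tied to any testcase):

       struct S { int i; };
       int S::*pm = &S::i;

   gives "int S::*" an OFFSET_TYPE, so the DIE built above is a
   DW_TAG_ptr_to_member_type whose DW_AT_containing_type refers to S and
   whose DW_AT_use_location is the one-operation expression DW_OP_plus:
   a consumer pushes the member-pointer value and the object address and
   adds them to obtain the member's address.  When the pointed-to member
   is a function or method type, no DW_AT_use_location is emitted.  */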
24173
24174 static char *producer_string;
24175
24176 /* Return a heap allocated producer string including command line options
24177 if -grecord-gcc-switches. */
24178
24179 static char *
24180 gen_producer_string (void)
24181 {
24182 size_t j;
24183 auto_vec<const char *> switches;
24184 const char *language_string = lang_hooks.name;
24185 char *producer, *tail;
24186 const char *p;
24187 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24188 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24189
24190 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24191 switch (save_decoded_options[j].opt_index)
24192 {
24193 case OPT_o:
24194 case OPT_d:
24195 case OPT_dumpbase:
24196 case OPT_dumpdir:
24197 case OPT_auxbase:
24198 case OPT_auxbase_strip:
24199 case OPT_quiet:
24200 case OPT_version:
24201 case OPT_v:
24202 case OPT_w:
24203 case OPT_L:
24204 case OPT_D:
24205 case OPT_I:
24206 case OPT_U:
24207 case OPT_SPECIAL_unknown:
24208 case OPT_SPECIAL_ignore:
24209 case OPT_SPECIAL_program_name:
24210 case OPT_SPECIAL_input_file:
24211 case OPT_grecord_gcc_switches:
24212 case OPT__output_pch_:
24213 case OPT_fdiagnostics_show_location_:
24214 case OPT_fdiagnostics_show_option:
24215 case OPT_fdiagnostics_show_caret:
24216 case OPT_fdiagnostics_color_:
24217 case OPT_fverbose_asm:
24218 case OPT____:
24219 case OPT__sysroot_:
24220 case OPT_nostdinc:
24221 case OPT_nostdinc__:
24222 case OPT_fpreprocessed:
24223 case OPT_fltrans_output_list_:
24224 case OPT_fresolution_:
24225 case OPT_fdebug_prefix_map_:
24226 case OPT_fmacro_prefix_map_:
24227 case OPT_ffile_prefix_map_:
24228 case OPT_fcompare_debug:
24229 /* Ignore these. */
24230 continue;
24231 default:
24232 if (cl_options[save_decoded_options[j].opt_index].flags
24233 & CL_NO_DWARF_RECORD)
24234 continue;
24235 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24236 == '-');
24237 switch (save_decoded_options[j].canonical_option[0][1])
24238 {
24239 case 'M':
24240 case 'i':
24241 case 'W':
24242 continue;
24243 case 'f':
24244 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24245 "dump", 4) == 0)
24246 continue;
24247 break;
24248 default:
24249 break;
24250 }
24251 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24252 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24253 break;
24254 }
24255
24256 producer = XNEWVEC (char, plen + 1 + len + 1);
24257 tail = producer;
24258 sprintf (tail, "%s %s", language_string, version_string);
24259 tail += plen;
24260
24261 FOR_EACH_VEC_ELT (switches, j, p)
24262 {
24263 len = strlen (p);
24264 *tail = ' ';
24265 memcpy (tail + 1, p, len);
24266 tail += len + 1;
24267 }
24268
24269 *tail = '\0';
24270 return producer;
24271 }
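/* Illustrative result (hypothetical compiler version and switch set):
   with -grecord-gcc-switches, compiling "gcc -c -O2 -g foo.c" would
   typically yield a DW_AT_producer string shaped like

       "GNU C17 8.1.0 -O2 -g"

   i.e. "<language> <version>" followed by the retained switches, while
   options filtered above (-o, -I/-D/-U, diagnostics and -fdump* flags,
   ...) never appear.  Without -grecord-gcc-switches only the language
   and version part is produced.  */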
24272
24273 /* Given a C and/or C++ language/version string return the "highest".
24274 C++ is assumed to be "higher" than C in this case. Used for merging
24275 LTO translation unit languages. */
24276 static const char *
24277 highest_c_language (const char *lang1, const char *lang2)
24278 {
24279 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24280 return "GNU C++17";
24281 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24282 return "GNU C++14";
24283 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24284 return "GNU C++11";
24285 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24286 return "GNU C++98";
24287
24288 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24289 return "GNU C17";
24290 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24291 return "GNU C11";
24292 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24293 return "GNU C99";
24294 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24295 return "GNU C89";
24296
24297 gcc_unreachable ();
24298 }
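/* For example, merging LTO units compiled as "GNU C99" and "GNU C++14"
   yields "GNU C++14".  The caller only reaches this function for
   language strings starting with "GNU C", so anything else is a bug
   (hence the gcc_unreachable above).  */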
24299
24300
24301 /* Generate the DIE for the compilation unit. */
24302
24303 static dw_die_ref
24304 gen_compile_unit_die (const char *filename)
24305 {
24306 dw_die_ref die;
24307 const char *language_string = lang_hooks.name;
24308 int language;
24309
24310 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24311
24312 if (filename)
24313 {
24314 add_name_attribute (die, filename);
24315 /* Don't add cwd for <built-in>. */
24316 if (filename[0] != '<')
24317 add_comp_dir_attribute (die);
24318 }
24319
24320 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24321
24322 /* If our producer is LTO try to figure out a common language to use
24323 from the global list of translation units. */
24324 if (strcmp (language_string, "GNU GIMPLE") == 0)
24325 {
24326 unsigned i;
24327 tree t;
24328 const char *common_lang = NULL;
24329
24330 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24331 {
24332 if (!TRANSLATION_UNIT_LANGUAGE (t))
24333 continue;
24334 if (!common_lang)
24335 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24336 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24337 ;
24338 else if (strncmp (common_lang, "GNU C", 5) == 0
24339 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24340 /* Mixing C and C++ is ok, use C++ in that case. */
24341 common_lang = highest_c_language (common_lang,
24342 TRANSLATION_UNIT_LANGUAGE (t));
24343 else
24344 {
24345 /* Fall back to C. */
24346 common_lang = NULL;
24347 break;
24348 }
24349 }
24350
24351 if (common_lang)
24352 language_string = common_lang;
24353 }
24354
24355 language = DW_LANG_C;
24356 if (strncmp (language_string, "GNU C", 5) == 0
24357 && ISDIGIT (language_string[5]))
24358 {
24359 language = DW_LANG_C89;
24360 if (dwarf_version >= 3 || !dwarf_strict)
24361 {
24362 if (strcmp (language_string, "GNU C89") != 0)
24363 language = DW_LANG_C99;
24364
24365 if (dwarf_version >= 5 /* || !dwarf_strict */)
24366 if (strcmp (language_string, "GNU C11") == 0
24367 || strcmp (language_string, "GNU C17") == 0)
24368 language = DW_LANG_C11;
24369 }
24370 }
24371 else if (strncmp (language_string, "GNU C++", 7) == 0)
24372 {
24373 language = DW_LANG_C_plus_plus;
24374 if (dwarf_version >= 5 /* || !dwarf_strict */)
24375 {
24376 if (strcmp (language_string, "GNU C++11") == 0)
24377 language = DW_LANG_C_plus_plus_11;
24378 else if (strcmp (language_string, "GNU C++14") == 0)
24379 language = DW_LANG_C_plus_plus_14;
24380 else if (strcmp (language_string, "GNU C++17") == 0)
24381 /* For now. */
24382 language = DW_LANG_C_plus_plus_14;
24383 }
24384 }
24385 else if (strcmp (language_string, "GNU F77") == 0)
24386 language = DW_LANG_Fortran77;
24387 else if (dwarf_version >= 3 || !dwarf_strict)
24388 {
24389 if (strcmp (language_string, "GNU Ada") == 0)
24390 language = DW_LANG_Ada95;
24391 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24392 {
24393 language = DW_LANG_Fortran95;
24394 if (dwarf_version >= 5 /* || !dwarf_strict */)
24395 {
24396 if (strcmp (language_string, "GNU Fortran2003") == 0)
24397 language = DW_LANG_Fortran03;
24398 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24399 language = DW_LANG_Fortran08;
24400 }
24401 }
24402 else if (strcmp (language_string, "GNU Objective-C") == 0)
24403 language = DW_LANG_ObjC;
24404 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24405 language = DW_LANG_ObjC_plus_plus;
24406 else if (dwarf_version >= 5 || !dwarf_strict)
24407 {
24408 if (strcmp (language_string, "GNU Go") == 0)
24409 language = DW_LANG_Go;
24410 }
24411 }
24412 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24413 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24414 language = DW_LANG_Fortran90;
24415
24416 add_AT_unsigned (die, DW_AT_language, language);
24417
24418 switch (language)
24419 {
24420 case DW_LANG_Fortran77:
24421 case DW_LANG_Fortran90:
24422 case DW_LANG_Fortran95:
24423 case DW_LANG_Fortran03:
24424 case DW_LANG_Fortran08:
24425 /* Fortran has case-insensitive identifiers and the front end
24426 lowercases everything. */
24427 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24428 break;
24429 default:
24430 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24431 break;
24432 }
24433 return die;
24434 }
24435
24436 /* Generate the DIE for a base class. */
24437
24438 static void
24439 gen_inheritance_die (tree binfo, tree access, tree type,
24440 dw_die_ref context_die)
24441 {
24442 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24443 struct vlr_context ctx = { type, NULL };
24444
24445 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24446 context_die);
24447 add_data_member_location_attribute (die, binfo, &ctx);
24448
24449 if (BINFO_VIRTUAL_P (binfo))
24450 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24451
24452 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24453 children, otherwise the default is DW_ACCESS_public. In DWARF2
24454 the default has always been DW_ACCESS_private. */
24455 if (access == access_public_node)
24456 {
24457 if (dwarf_version == 2
24458 || context_die->die_tag == DW_TAG_class_type)
24459 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24460 }
24461 else if (access == access_protected_node)
24462 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24463 else if (dwarf_version > 2
24464 && context_die->die_tag != DW_TAG_class_type)
24465 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24466 }
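/* Worked example of the accessibility defaults above (hypothetical C++):

       struct D : B, private C { };

   With DWARF 3+ defaults, the public base B needs no DW_AT_accessibility
   here (D is a structure, so public is the default) while the private
   base C gets DW_ACCESS_private; had D been a class, it would be the
   public base that needed an explicit DW_ACCESS_public.  Under DWARF 2
   the default is always private, so only public and protected bases are
   marked.  */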
24467
24468 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24469 structure. */
24470 static bool
24471 is_variant_part (tree decl)
24472 {
24473 return (TREE_CODE (decl) == FIELD_DECL
24474 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24475 }
24476
24477 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24478 return the FIELD_DECL. Return NULL_TREE otherwise. */
24479
24480 static tree
24481 analyze_discr_in_predicate (tree operand, tree struct_type)
24482 {
24483 bool continue_stripping = true;
24484 while (continue_stripping)
24485 switch (TREE_CODE (operand))
24486 {
24487 CASE_CONVERT:
24488 operand = TREE_OPERAND (operand, 0);
24489 break;
24490 default:
24491 continue_stripping = false;
24492 break;
24493 }
24494
24495 /* Match field access to members of struct_type only. */
24496 if (TREE_CODE (operand) == COMPONENT_REF
24497 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24498 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24499 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24500 return TREE_OPERAND (operand, 1);
24501 else
24502 return NULL_TREE;
24503 }
24504
24505 /* Check that SRC is a constant integer that can be represented as a native
24506 integer constant (either signed or unsigned). If so, store it into DEST and
24507 return true. Return false otherwise. */
24508
24509 static bool
24510 get_discr_value (tree src, dw_discr_value *dest)
24511 {
24512 tree discr_type = TREE_TYPE (src);
24513
24514 if (lang_hooks.types.get_debug_type)
24515 {
24516 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24517 if (debug_type != NULL)
24518 discr_type = debug_type;
24519 }
24520
24521 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24522 return false;
24523
24524 /* Signedness can vary between the original type and the debug type. This
24525 can happen for character types in Ada for instance: the character type
24526 used for code generation can be signed, to be compatible with the C one,
24527 but from a debugger point of view, it must be unsigned. */
24528 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24529 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24530
24531 if (is_orig_unsigned != is_debug_unsigned)
24532 src = fold_convert (discr_type, src);
24533
24534 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24535 return false;
24536
24537 dest->pos = is_debug_unsigned;
24538 if (is_debug_unsigned)
24539 dest->v.uval = tree_to_uhwi (src);
24540 else
24541 dest->v.sval = tree_to_shwi (src);
24542
24543 return true;
24544 }
24545
24546 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24547 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24548 store NULL_TREE in DISCR_DECL. Otherwise:
24549
24550 - store the discriminant field in STRUCT_TYPE that controls the variant
24551 part to *DISCR_DECL
24552
24553 - put in *DISCR_LISTS_P an array where for each variant, the item
24554 represents the corresponding matching list of discriminant values.
24555
24556 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24557 the above array.
24558
24559 Note that when the array is allocated (i.e. when the analysis is
24560 successful), it is up to the caller to free the array. */
24561
24562 static void
24563 analyze_variants_discr (tree variant_part_decl,
24564 tree struct_type,
24565 tree *discr_decl,
24566 dw_discr_list_ref **discr_lists_p,
24567 unsigned *discr_lists_length)
24568 {
24569 tree variant_part_type = TREE_TYPE (variant_part_decl);
24570 tree variant;
24571 dw_discr_list_ref *discr_lists;
24572 unsigned i;
24573
24574 /* Compute how many variants there are in this variant part. */
24575 *discr_lists_length = 0;
24576 for (variant = TYPE_FIELDS (variant_part_type);
24577 variant != NULL_TREE;
24578 variant = DECL_CHAIN (variant))
24579 ++*discr_lists_length;
24580
24581 *discr_decl = NULL_TREE;
24582 *discr_lists_p
24583 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24584 sizeof (**discr_lists_p));
24585 discr_lists = *discr_lists_p;
24586
24587 /* And then analyze all variants to extract discriminant information for all
24588 of them. This analysis is conservative: as soon as we detect something we
24589 do not support, abort everything and pretend we found nothing. */
24590 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24591 variant != NULL_TREE;
24592 variant = DECL_CHAIN (variant), ++i)
24593 {
24594 tree match_expr = DECL_QUALIFIER (variant);
24595
24596 /* Now, try to analyze the predicate and deduce a discriminant for
24597 it. */
24598 if (match_expr == boolean_true_node)
24599 /* Typically happens for the default variant: it matches all cases that
24600 previous variants rejected. Don't output any matching value for
24601 this one. */
24602 continue;
24603
24604 /* The following loop tries to iterate over each discriminant
24605 possibility: single values or ranges. */
24606 while (match_expr != NULL_TREE)
24607 {
24608 tree next_round_match_expr;
24609 tree candidate_discr = NULL_TREE;
24610 dw_discr_list_ref new_node = NULL;
24611
24612 /* Possibilities are matched one after the other by nested
24613 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24614 continue with the rest at next iteration. */
24615 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24616 {
24617 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24618 match_expr = TREE_OPERAND (match_expr, 1);
24619 }
24620 else
24621 next_round_match_expr = NULL_TREE;
24622
24623 if (match_expr == boolean_false_node)
24624 /* This sub-expression matches nothing: just wait for the next
24625 one. */
24626 ;
24627
24628 else if (TREE_CODE (match_expr) == EQ_EXPR)
24629 {
24630 /* We are matching: <discr_field> == <integer_cst>
24631 This sub-expression matches a single value. */
24632 tree integer_cst = TREE_OPERAND (match_expr, 1);
24633
24634 candidate_discr
24635 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24636 struct_type);
24637
24638 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24639 if (!get_discr_value (integer_cst,
24640 &new_node->dw_discr_lower_bound))
24641 goto abort;
24642 new_node->dw_discr_range = false;
24643 }
24644
24645 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24646 {
24647 /* We are matching:
24648 <discr_field> > <integer_cst>
24649 && <discr_field> < <integer_cst>.
24650 This sub-expression matches the range of values between the
24651 two matched integer constants. Note that comparisons can be
24652 inclusive or exclusive. */
24653 tree candidate_discr_1, candidate_discr_2;
24654 tree lower_cst, upper_cst;
24655 bool lower_cst_included, upper_cst_included;
24656 tree lower_op = TREE_OPERAND (match_expr, 0);
24657 tree upper_op = TREE_OPERAND (match_expr, 1);
24658
24659 /* When the comparison is exclusive, the integer constant is not
24660 the discriminant range bound we are looking for: we will have
24661 to increment or decrement it. */
24662 if (TREE_CODE (lower_op) == GE_EXPR)
24663 lower_cst_included = true;
24664 else if (TREE_CODE (lower_op) == GT_EXPR)
24665 lower_cst_included = false;
24666 else
24667 goto abort;
24668
24669 if (TREE_CODE (upper_op) == LE_EXPR)
24670 upper_cst_included = true;
24671 else if (TREE_CODE (upper_op) == LT_EXPR)
24672 upper_cst_included = false;
24673 else
24674 goto abort;
24675
24676 /* Extract the discriminant from the first operand and check it
24677 is consistent with the same analysis in the second
24678 operand. */
24679 candidate_discr_1
24680 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24681 struct_type);
24682 candidate_discr_2
24683 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24684 struct_type);
24685 if (candidate_discr_1 == candidate_discr_2)
24686 candidate_discr = candidate_discr_1;
24687 else
24688 goto abort;
24689
24690 /* Extract bounds from both. */
24691 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24692 lower_cst = TREE_OPERAND (lower_op, 1);
24693 upper_cst = TREE_OPERAND (upper_op, 1);
24694
24695 if (!lower_cst_included)
24696 lower_cst
24697 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24698 build_int_cst (TREE_TYPE (lower_cst), 1));
24699 if (!upper_cst_included)
24700 upper_cst
24701 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24702 build_int_cst (TREE_TYPE (upper_cst), 1));
24703
24704 if (!get_discr_value (lower_cst,
24705 &new_node->dw_discr_lower_bound)
24706 || !get_discr_value (upper_cst,
24707 &new_node->dw_discr_upper_bound))
24708 goto abort;
24709
24710 new_node->dw_discr_range = true;
24711 }
24712
24713 else
24714 /* Unsupported sub-expression: we cannot determine the set of
24715 matching discriminant values. Abort everything. */
24716 goto abort;
24717
24718 /* If the discriminant info is not consistent with what we saw so
24719 far, consider the analysis failed and abort everything. */
24720 if (candidate_discr == NULL_TREE
24721 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24722 goto abort;
24723 else
24724 *discr_decl = candidate_discr;
24725
24726 if (new_node != NULL)
24727 {
24728 new_node->dw_discr_next = discr_lists[i];
24729 discr_lists[i] = new_node;
24730 }
24731 match_expr = next_round_match_expr;
24732 }
24733 }
24734
24735 /* If we reach this point, we could match everything we were interested
24736 in. */
24737 return;
24738
24739 abort:
24740 /* Clean up all data structures and return no result. */
24741 free (*discr_lists_p);
24742 *discr_lists_p = NULL;
24743 *discr_decl = NULL_TREE;
24744 }
24745
24746 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24747 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24748 under CONTEXT_DIE.
24749
24750 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24751 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24752 this type, which are record types, represent the available variants and each
24753 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24754 values are inferred from these attributes.
24755
24756 In trees, the offsets for the fields inside these sub-records are relative
24757 to the variant part itself, whereas the corresponding DIEs should have
24758 offset attributes that are relative to the embedding record base address.
24759 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24760 must be an expression that computes the offset of the variant part to
24761 describe in DWARF. */
24762
24763 static void
24764 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24765 dw_die_ref context_die)
24766 {
24767 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24768 tree variant_part_offset = vlr_ctx->variant_part_offset;
24769 struct loc_descr_context ctx = {
24770 vlr_ctx->struct_type, /* context_type */
24771 NULL_TREE, /* base_decl */
24772 NULL, /* dpi */
24773 false, /* placeholder_arg */
24774 false /* placeholder_seen */
24775 };
24776
24777 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24778 NULL_TREE if there is no such field. */
24779 tree discr_decl = NULL_TREE;
24780 dw_discr_list_ref *discr_lists;
24781 unsigned discr_lists_length = 0;
24782 unsigned i;
24783
24784 dw_die_ref dwarf_proc_die = NULL;
24785 dw_die_ref variant_part_die
24786 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24787
24788 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24789
24790 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24791 &discr_decl, &discr_lists, &discr_lists_length);
24792
24793 if (discr_decl != NULL_TREE)
24794 {
24795 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24796
24797 if (discr_die)
24798 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24799 else
24800 /* We have no DIE for the discriminant, so just discard all
24801 discriminant information in the output. */
24802 discr_decl = NULL_TREE;
24803 }
24804
24805 /* If the offset for this variant part is more complex than a constant,
24806 create a DWARF procedure for it so that we will not have to generate DWARF
24807 expressions for it for each member. */
24808 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24809 && (dwarf_version >= 3 || !dwarf_strict))
24810 {
24811 const tree dwarf_proc_fndecl
24812 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24813 build_function_type (TREE_TYPE (variant_part_offset),
24814 NULL_TREE));
24815 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24816 const dw_loc_descr_ref dwarf_proc_body
24817 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24818
24819 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24820 dwarf_proc_fndecl, context_die);
24821 if (dwarf_proc_die != NULL)
24822 variant_part_offset = dwarf_proc_call;
24823 }
24824
24825 /* Output DIEs for all variants. */
24826 i = 0;
24827 for (tree variant = TYPE_FIELDS (variant_part_type);
24828 variant != NULL_TREE;
24829 variant = DECL_CHAIN (variant), ++i)
24830 {
24831 tree variant_type = TREE_TYPE (variant);
24832 dw_die_ref variant_die;
24833
24834 /* All variants (i.e. members of a variant part) are supposed to be
24835 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
24836 under these records. */
24837 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
24838
24839 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
24840 equate_decl_number_to_die (variant, variant_die);
24841
24842 /* Output discriminant values this variant matches, if any. */
24843 if (discr_decl == NULL || discr_lists[i] == NULL)
24844 /* If we have no discriminant information at all, or if this is
24845 the default variant with no matching values, then, as the
24846 standard says, don't output any discriminant value/list attribute. */
24847 ;
24848 else if (discr_lists[i]->dw_discr_next == NULL
24849 && !discr_lists[i]->dw_discr_range)
24850 /* If there is only one accepted value, don't bother outputting a
24851 list. */
24852 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24853 else
24854 add_discr_list (variant_die, discr_lists[i]);
24855
24856 for (tree member = TYPE_FIELDS (variant_type);
24857 member != NULL_TREE;
24858 member = DECL_CHAIN (member))
24859 {
24860 struct vlr_context vlr_sub_ctx = {
24861 vlr_ctx->struct_type, /* struct_type */
24862 NULL /* variant_part_offset */
24863 };
24864 if (is_variant_part (member))
24865 {
24866 /* All offsets for fields inside variant parts are relative to
24867 the top-level embedding RECORD_TYPE's base address. On the
24868 other hand, offsets in GCC's types are relative to the
24869 nested-most variant part. So we have to sum offsets each time
24870 we recurse. */
24871
24872 vlr_sub_ctx.variant_part_offset
24873 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
24874 variant_part_offset, byte_position (member));
24875 gen_variant_part (member, &vlr_sub_ctx, variant_die);
24876 }
24877 else
24878 {
24879 vlr_sub_ctx.variant_part_offset = variant_part_offset;
24880 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
24881 }
24882 }
24883 }
24884
24885 free (discr_lists);
24886 }
24887
24888 /* Generate a DIE for a class member. */
24889
24890 static void
24891 gen_member_die (tree type, dw_die_ref context_die)
24892 {
24893 tree member;
24894 tree binfo = TYPE_BINFO (type);
24895
24896 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
24897
24898 /* If this is not an incomplete type, output descriptions of each of its
24899 members. Note that as we output the DIEs necessary to represent the
24900 members of this record or union type, we will also be trying to output
24901 DIEs to represent the *types* of those members. However the `type'
24902 function (above) will specifically avoid generating type DIEs for member
24903 types *within* the list of member DIEs for this (containing) type except
24904 for those types (of members) which are explicitly marked as also being
24905 members of this (containing) type themselves. The g++ front end can
24906 force any given type to be treated as a member of some other (containing)
24907 type by setting the TYPE_CONTEXT of the given (member) type to point to
24908 the TREE node representing the appropriate (containing) type. */
24909
24910 /* First output info about the base classes. */
24911 if (binfo)
24912 {
24913 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
24914 int i;
24915 tree base;
24916
24917 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
24918 gen_inheritance_die (base,
24919 (accesses ? (*accesses)[i] : access_public_node),
24920 type,
24921 context_die);
24922 }
24923
24924 /* Now output info about the data members and type members. */
24925 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
24926 {
24927 struct vlr_context vlr_ctx = { type, NULL_TREE };
24928 bool static_inline_p
24929 = (TREE_STATIC (member)
24930 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
24931 != -1));
24932
24933 /* Ignore clones. */
24934 if (DECL_ABSTRACT_ORIGIN (member))
24935 continue;
24936
24937 /* If we thought we were generating minimal debug info for TYPE
24938 and then changed our minds, some of the member declarations
24939 may have already been defined. Don't define them again, but
24940 do put them in the right order. */
24941
24942 if (dw_die_ref child = lookup_decl_die (member))
24943 {
24944 /* Handle inline static data members, which only have in-class
24945 declarations. */
24946 dw_die_ref ref = NULL;
24947 if (child->die_tag == DW_TAG_variable
24948 && child->die_parent == comp_unit_die ())
24949 {
24950 ref = get_AT_ref (child, DW_AT_specification);
24951 /* For C++17 inline static data members followed by redundant
24952 out-of-class redeclaration, we might get here with
24953 child being the DIE created for the out-of-class
24954 redeclaration and with its DW_AT_specification being
24955 the DIE created for in-class definition. We want to
24956 reparent the latter, and don't want to create another
24957 DIE with DW_AT_specification in that case, because
24958 we already have one. */
24959 if (ref
24960 && static_inline_p
24961 && ref->die_tag == DW_TAG_variable
24962 && ref->die_parent == comp_unit_die ()
24963 && get_AT (ref, DW_AT_specification) == NULL)
24964 {
24965 child = ref;
24966 ref = NULL;
24967 static_inline_p = false;
24968 }
24969 }
24970
24971 if (child->die_tag == DW_TAG_variable
24972 && child->die_parent == comp_unit_die ()
24973 && ref == NULL)
24974 {
24975 reparent_child (child, context_die);
24976 if (dwarf_version < 5)
24977 child->die_tag = DW_TAG_member;
24978 }
24979 else
24980 splice_child_die (context_die, child);
24981 }
24982
24983 /* Do not generate standard DWARF for variant parts if we are generating
24984 the corresponding GNAT encodings: DIEs generated for both would
24985 conflict in our mappings. */
24986 else if (is_variant_part (member)
24987 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
24988 {
24989 vlr_ctx.variant_part_offset = byte_position (member);
24990 gen_variant_part (member, &vlr_ctx, context_die);
24991 }
24992 else
24993 {
24994 vlr_ctx.variant_part_offset = NULL_TREE;
24995 gen_decl_die (member, NULL, &vlr_ctx, context_die);
24996 }
24997
24998 /* For C++ inline static data members emit immediately a DW_TAG_variable
24999 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25000 DW_AT_specification. */
25001 if (static_inline_p)
25002 {
25003 int old_extern = DECL_EXTERNAL (member);
25004 DECL_EXTERNAL (member) = 0;
25005 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25006 DECL_EXTERNAL (member) = old_extern;
25007 }
25008 }
25009 }
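/* Illustrative C++17 case for the inline static data member handling in
   gen_member_die above (hypothetical source):

       struct S { static inline int x = 1; };

   The member x gets a declaration DIE as a child of S's DIE
   (DW_TAG_member prior to DWARF 5, DW_TAG_variable otherwise) and,
   because static_inline_p is set, an additional DW_TAG_variable at the
   compile-unit level that points back to it via DW_AT_specification and
   carries the location information for the definition.  */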
25010
25011 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25012 is set, we pretend that the type was never defined, so we only get the
25013 member DIEs needed by later specification DIEs. */
25014
25015 static void
25016 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25017 enum debug_info_usage usage)
25018 {
25019 if (TREE_ASM_WRITTEN (type))
25020 {
25021 /* Fill in the bounds of variable-length fields in late DWARF if
25022 still incomplete. */
25023 if (!early_dwarf && variably_modified_type_p (type, NULL))
25024 for (tree member = TYPE_FIELDS (type);
25025 member;
25026 member = DECL_CHAIN (member))
25027 fill_variable_array_bounds (TREE_TYPE (member));
25028 return;
25029 }
25030
25031 dw_die_ref type_die = lookup_type_die (type);
25032 dw_die_ref scope_die = 0;
25033 int nested = 0;
25034 int complete = (TYPE_SIZE (type)
25035 && (! TYPE_STUB_DECL (type)
25036 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25037 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25038 complete = complete && should_emit_struct_debug (type, usage);
25039
25040 if (type_die && ! complete)
25041 return;
25042
25043 if (TYPE_CONTEXT (type) != NULL_TREE
25044 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25045 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25046 nested = 1;
25047
25048 scope_die = scope_die_for (type, context_die);
25049
25050 /* Generate child DIEs for template parameters. */
25051 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25052 schedule_generic_params_dies_gen (type);
25053
25054 if (! type_die || (nested && is_cu_die (scope_die)))
25055 /* First occurrence of type or toplevel definition of nested class. */
25056 {
25057 dw_die_ref old_die = type_die;
25058
25059 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25060 ? record_type_tag (type) : DW_TAG_union_type,
25061 scope_die, type);
25062 equate_type_number_to_die (type, type_die);
25063 if (old_die)
25064 add_AT_specification (type_die, old_die);
25065 else
25066 add_name_attribute (type_die, type_tag (type));
25067 }
25068 else
25069 remove_AT (type_die, DW_AT_declaration);
25070
25071 /* If this type has been completed, then give it a byte_size attribute and
25072 then give a list of members. */
25073 if (complete && !ns_decl)
25074 {
25075 /* Prevent infinite recursion in cases where the type of some member of
25076 this type is expressed in terms of this type itself. */
25077 TREE_ASM_WRITTEN (type) = 1;
25078 add_byte_size_attribute (type_die, type);
25079 add_alignment_attribute (type_die, type);
25080 if (TYPE_STUB_DECL (type) != NULL_TREE)
25081 {
25082 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25083 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25084 }
25085
25086 /* If the first reference to this type was as the return type of an
25087 inline function, then it may not have a parent. Fix this now. */
25088 if (type_die->die_parent == NULL)
25089 add_child_die (scope_die, type_die);
25090
25091 push_decl_scope (type);
25092 gen_member_die (type, type_die);
25093 pop_decl_scope ();
25094
25095 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25096 if (TYPE_ARTIFICIAL (type))
25097 add_AT_flag (type_die, DW_AT_artificial, 1);
25098
25099 /* GNU extension: Record what type our vtable lives in. */
25100 if (TYPE_VFIELD (type))
25101 {
25102 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25103
25104 gen_type_die (vtype, context_die);
25105 add_AT_die_ref (type_die, DW_AT_containing_type,
25106 lookup_type_die (vtype));
25107 }
25108 }
25109 else
25110 {
25111 add_AT_flag (type_die, DW_AT_declaration, 1);
25112
25113 /* We don't need to do this for function-local types. */
25114 if (TYPE_STUB_DECL (type)
25115 && ! decl_function_context (TYPE_STUB_DECL (type)))
25116 vec_safe_push (incomplete_types, type);
25117 }
25118
25119 if (get_AT (type_die, DW_AT_name))
25120 add_pubtype (type, type_die);
25121 }
25122
25123 /* Generate a DIE for a subroutine _type_. */
25124
25125 static void
25126 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25127 {
25128 tree return_type = TREE_TYPE (type);
25129 dw_die_ref subr_die
25130 = new_die (DW_TAG_subroutine_type,
25131 scope_die_for (type, context_die), type);
25132
25133 equate_type_number_to_die (type, subr_die);
25134 add_prototyped_attribute (subr_die, type);
25135 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25136 context_die);
25137 add_alignment_attribute (subr_die, type);
25138 gen_formal_types_die (type, subr_die);
25139
25140 if (get_AT (subr_die, DW_AT_name))
25141 add_pubtype (type, subr_die);
25142 if ((dwarf_version >= 5 || !dwarf_strict)
25143 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25144 add_AT_flag (subr_die, DW_AT_reference, 1);
25145 if ((dwarf_version >= 5 || !dwarf_strict)
25146 && lang_hooks.types.type_dwarf_attribute (type,
25147 DW_AT_rvalue_reference) != -1)
25148 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25149 }
25150
25151 /* Generate a DIE for a type definition. */
25152
25153 static void
25154 gen_typedef_die (tree decl, dw_die_ref context_die)
25155 {
25156 dw_die_ref type_die;
25157 tree type;
25158
25159 if (TREE_ASM_WRITTEN (decl))
25160 {
25161 if (DECL_ORIGINAL_TYPE (decl))
25162 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25163 return;
25164 }
25165
25166 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25167 checks in process_scope_var and modified_type_die), this should be called
25168 only for original types. */
25169 gcc_assert (decl_ultimate_origin (decl) == NULL
25170 || decl_ultimate_origin (decl) == decl);
25171
25172 TREE_ASM_WRITTEN (decl) = 1;
25173 type_die = new_die (DW_TAG_typedef, context_die, decl);
25174
25175 add_name_and_src_coords_attributes (type_die, decl);
25176 if (DECL_ORIGINAL_TYPE (decl))
25177 {
25178 type = DECL_ORIGINAL_TYPE (decl);
25179 if (type == error_mark_node)
25180 return;
25181
25182 gcc_assert (type != TREE_TYPE (decl));
25183 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25184 }
25185 else
25186 {
25187 type = TREE_TYPE (decl);
25188 if (type == error_mark_node)
25189 return;
25190
25191 if (is_naming_typedef_decl (TYPE_NAME (type)))
25192 {
25193 /* Here, we are in the case of decl being a typedef naming
25194 an anonymous type, e.g:
25195 typedef struct {...} foo;
25196 In that case TREE_TYPE (decl) is not a typedef variant
25197 type and TYPE_NAME of the anonymous type is set to the
25198 TYPE_DECL of the typedef. This construct is emitted by
25199 the C++ FE.
25200
25201 TYPE is the anonymous struct named by the typedef
25202 DECL. As we need the DW_AT_type attribute of the
25203 DW_TAG_typedef to point to the DIE of TYPE, let's
25204 generate that DIE right away. add_type_attribute
25205 called below will then pick (via lookup_type_die) that
25206 anonymous struct DIE. */
25207 if (!TREE_ASM_WRITTEN (type))
25208 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25209
25210 /* This is a GNU Extension. We are adding a
25211 DW_AT_linkage_name attribute to the DIE of the
25212 anonymous struct TYPE. The value of that attribute
25213 is the name of the typedef decl naming the anonymous
25214 struct. This greatly eases the work of consumers of
25215 this debug info. */
25216 add_linkage_name_raw (lookup_type_die (type), decl);
25217 }
25218 }
25219
25220 add_type_attribute (type_die, type, decl_quals (decl), false,
25221 context_die);
25222
25223 if (is_naming_typedef_decl (decl))
25224 /* We want that all subsequent calls to lookup_type_die with
25225 TYPE in argument yield the DW_TAG_typedef we have just
25226 created. */
25227 equate_type_number_to_die (type, type_die);
25228
25229 add_alignment_attribute (type_die, TREE_TYPE (decl));
25230
25231 add_accessibility_attribute (type_die, decl);
25232
25233 if (DECL_ABSTRACT_P (decl))
25234 equate_decl_number_to_die (decl, type_die);
25235
25236 if (get_AT (type_die, DW_AT_name))
25237 add_pubtype (decl, type_die);
25238 }
25239
25240 /* Generate a DIE for a struct, class, enum or union type. */
25241
25242 static void
25243 gen_tagged_type_die (tree type,
25244 dw_die_ref context_die,
25245 enum debug_info_usage usage)
25246 {
25247 int need_pop;
25248
25249 if (type == NULL_TREE
25250 || !is_tagged_type (type))
25251 return;
25252
25253 if (TREE_ASM_WRITTEN (type))
25254 need_pop = 0;
25255 /* If this is a nested type whose containing class hasn't been written
25256 out yet, writing it out will cover this one, too. This does not apply
25257 to instantiations of member class templates; they need to be added to
25258 the containing class as they are generated. FIXME: This hurts the
25259 idea of combining type decls from multiple TUs, since we can't predict
25260 what set of template instantiations we'll get. */
25261 else if (TYPE_CONTEXT (type)
25262 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25263 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25264 {
25265 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25266
25267 if (TREE_ASM_WRITTEN (type))
25268 return;
25269
25270 /* If that failed, attach ourselves to the stub. */
25271 push_decl_scope (TYPE_CONTEXT (type));
25272 context_die = lookup_type_die (TYPE_CONTEXT (type));
25273 need_pop = 1;
25274 }
25275 else if (TYPE_CONTEXT (type) != NULL_TREE
25276 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25277 {
25278 /* If this type is local to a function that hasn't been written
25279 out yet, use a NULL context for now; it will be fixed up in
25280 decls_for_scope. */
25281 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25282 /* A declaration DIE doesn't count; nested types need to go in the
25283 specification. */
25284 if (context_die && is_declaration_die (context_die))
25285 context_die = NULL;
25286 need_pop = 0;
25287 }
25288 else
25289 {
25290 context_die = declare_in_namespace (type, context_die);
25291 need_pop = 0;
25292 }
25293
25294 if (TREE_CODE (type) == ENUMERAL_TYPE)
25295 {
25296 /* This might have been written out by the call to
25297 declare_in_namespace. */
25298 if (!TREE_ASM_WRITTEN (type))
25299 gen_enumeration_type_die (type, context_die);
25300 }
25301 else
25302 gen_struct_or_union_type_die (type, context_die, usage);
25303
25304 if (need_pop)
25305 pop_decl_scope ();
25306
25307 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25308 it up if it is ever completed. gen_*_type_die will set it for us
25309 when appropriate. */
25310 }
25311
25312 /* Generate a type description DIE. */
25313
25314 static void
25315 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25316 enum debug_info_usage usage)
25317 {
25318 struct array_descr_info info;
25319
25320 if (type == NULL_TREE || type == error_mark_node)
25321 return;
25322
25323 if (flag_checking && type)
25324 verify_type (type);
25325
25326 if (TYPE_NAME (type) != NULL_TREE
25327 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25328 && is_redundant_typedef (TYPE_NAME (type))
25329 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25330 /* The DECL of this type is a typedef we don't want to emit debug
25331 info for but we want debug info for its underlying typedef.
25332 This can happen for e.g, the injected-class-name of a C++
25333 type. */
25334 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25335
25336 /* If TYPE is a typedef type variant, let's generate debug info
25337 for the parent typedef which TYPE is a type of. */
25338 if (typedef_variant_p (type))
25339 {
25340 if (TREE_ASM_WRITTEN (type))
25341 return;
25342
25343 tree name = TYPE_NAME (type);
25344 tree origin = decl_ultimate_origin (name);
25345 if (origin != NULL && origin != name)
25346 {
25347 gen_decl_die (origin, NULL, NULL, context_die);
25348 return;
25349 }
25350
25351 /* Prevent broken recursion; we can't hand off to the same type. */
25352 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25353
25354 /* Give typedefs the right scope. */
25355 context_die = scope_die_for (type, context_die);
25356
25357 TREE_ASM_WRITTEN (type) = 1;
25358
25359 gen_decl_die (name, NULL, NULL, context_die);
25360 return;
25361 }
25362
25363 /* If type is an anonymous tagged type named by a typedef, let's
25364 generate debug info for the typedef. */
25365 if (is_naming_typedef_decl (TYPE_NAME (type)))
25366 {
25367 /* Use the DIE of the containing namespace as the parent DIE of
25368 the type description DIE we want to generate. */
25369 if (DECL_CONTEXT (TYPE_NAME (type))
25370 && TREE_CODE (DECL_CONTEXT (TYPE_NAME (type))) == NAMESPACE_DECL)
25371 context_die = get_context_die (DECL_CONTEXT (TYPE_NAME (type)));
25372
25373 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25374 return;
25375 }
25376
25377 if (lang_hooks.types.get_debug_type)
25378 {
25379 tree debug_type = lang_hooks.types.get_debug_type (type);
25380
25381 if (debug_type != NULL_TREE && debug_type != type)
25382 {
25383 gen_type_die_with_usage (debug_type, context_die, usage);
25384 return;
25385 }
25386 }
25387
25388 /* We are going to output a DIE to represent the unqualified version
25389 of this type (i.e. without any const or volatile qualifiers) so
25390 get the main variant (i.e. the unqualified version) of this type
25391 now. (Vectors and arrays are special because the debugging info is in the
25392 cloned type itself. Similarly function/method types can contain extra
25393 ref-qualification). */
25394 if (TREE_CODE (type) == FUNCTION_TYPE
25395 || TREE_CODE (type) == METHOD_TYPE)
25396 {
25397 /* For function/method types, can't use type_main_variant here,
25398 because that can have different ref-qualifiers for C++,
25399 but try to canonicalize. */
25400 tree main = TYPE_MAIN_VARIANT (type);
25401 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25402 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25403 && check_base_type (t, main)
25404 && check_lang_type (t, type))
25405 {
25406 type = t;
25407 break;
25408 }
25409 }
25410 else if (TREE_CODE (type) != VECTOR_TYPE
25411 && TREE_CODE (type) != ARRAY_TYPE)
25412 type = type_main_variant (type);
25413
25414 /* If this is an array type with hidden descriptor, handle it first. */
25415 if (!TREE_ASM_WRITTEN (type)
25416 && lang_hooks.types.get_array_descr_info)
25417 {
25418 memset (&info, 0, sizeof (info));
25419 if (lang_hooks.types.get_array_descr_info (type, &info))
25420 {
25421 /* Fortran sometimes emits array types with no dimension. */
25422 gcc_assert (info.ndimensions >= 0
25423 && (info.ndimensions
25424 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25425 gen_descr_array_type_die (type, &info, context_die);
25426 TREE_ASM_WRITTEN (type) = 1;
25427 return;
25428 }
25429 }
25430
25431 if (TREE_ASM_WRITTEN (type))
25432 {
25433 /* Variable-length types may be incomplete even if
25434 TREE_ASM_WRITTEN. For such types, fall through to
25435 gen_array_type_die() and possibly fill in
25436 DW_AT_{upper,lower}_bound attributes. */
25437 if ((TREE_CODE (type) != ARRAY_TYPE
25438 && TREE_CODE (type) != RECORD_TYPE
25439 && TREE_CODE (type) != UNION_TYPE
25440 && TREE_CODE (type) != QUAL_UNION_TYPE)
25441 || !variably_modified_type_p (type, NULL))
25442 return;
25443 }
25444
25445 switch (TREE_CODE (type))
25446 {
25447 case ERROR_MARK:
25448 break;
25449
25450 case POINTER_TYPE:
25451 case REFERENCE_TYPE:
25452 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25453 ensures that the gen_type_die recursion will terminate even if the
25454 type is recursive. Recursive types are possible in Ada. */
25455 /* ??? We could perhaps do this for all types before the switch
25456 statement. */
25457 TREE_ASM_WRITTEN (type) = 1;
25458
25459 /* For these types, all that is required is that we output a DIE (or a
25460 set of DIEs) to represent the "basis" type. */
25461 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25462 DINFO_USAGE_IND_USE);
25463 break;
25464
25465 case OFFSET_TYPE:
25466 /* This code is used for C++ pointer-to-data-member types.
25467 Output a description of the relevant class type. */
25468 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25469 DINFO_USAGE_IND_USE);
25470
25471 /* Output a description of the type of the object pointed to. */
25472 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25473 DINFO_USAGE_IND_USE);
25474
25475 /* Now output a DIE to represent this pointer-to-data-member type
25476 itself. */
25477 gen_ptr_to_mbr_type_die (type, context_die);
25478 break;
25479
25480 case FUNCTION_TYPE:
25481 /* Force out return type (in case it wasn't forced out already). */
25482 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25483 DINFO_USAGE_DIR_USE);
25484 gen_subroutine_type_die (type, context_die);
25485 break;
25486
25487 case METHOD_TYPE:
25488 /* Force out return type (in case it wasn't forced out already). */
25489 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25490 DINFO_USAGE_DIR_USE);
25491 gen_subroutine_type_die (type, context_die);
25492 break;
25493
25494 case ARRAY_TYPE:
25495 case VECTOR_TYPE:
25496 gen_array_type_die (type, context_die);
25497 break;
25498
25499 case ENUMERAL_TYPE:
25500 case RECORD_TYPE:
25501 case UNION_TYPE:
25502 case QUAL_UNION_TYPE:
25503 gen_tagged_type_die (type, context_die, usage);
25504 return;
25505
25506 case VOID_TYPE:
25507 case INTEGER_TYPE:
25508 case REAL_TYPE:
25509 case FIXED_POINT_TYPE:
25510 case COMPLEX_TYPE:
25511 case BOOLEAN_TYPE:
25512 case POINTER_BOUNDS_TYPE:
25513 /* No DIEs needed for fundamental types. */
25514 break;
25515
25516 case NULLPTR_TYPE:
25517 case LANG_TYPE:
25518 /* Just use DW_TAG_unspecified_type. */
25519 {
25520 dw_die_ref type_die = lookup_type_die (type);
25521 if (type_die == NULL)
25522 {
25523 tree name = TYPE_IDENTIFIER (type);
25524 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25525 type);
25526 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25527 equate_type_number_to_die (type, type_die);
25528 }
25529 }
25530 break;
25531
25532 default:
25533 if (is_cxx_auto (type))
25534 {
25535 tree name = TYPE_IDENTIFIER (type);
25536 dw_die_ref *die = (name == get_identifier ("auto")
25537 ? &auto_die : &decltype_auto_die);
25538 if (!*die)
25539 {
25540 *die = new_die (DW_TAG_unspecified_type,
25541 comp_unit_die (), NULL_TREE);
25542 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25543 }
25544 equate_type_number_to_die (type, *die);
25545 break;
25546 }
25547 gcc_unreachable ();
25548 }
25549
25550 TREE_ASM_WRITTEN (type) = 1;
25551 }
25552
25553 static void
25554 gen_type_die (tree type, dw_die_ref context_die)
25555 {
25556 if (type != error_mark_node)
25557 {
25558 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25559 if (flag_checking)
25560 {
25561 dw_die_ref die = lookup_type_die (type);
25562 if (die)
25563 check_die (die);
25564 }
25565 }
25566 }
25567
25568 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25569 things which are local to the given block. */
25570
25571 static void
25572 gen_block_die (tree stmt, dw_die_ref context_die)
25573 {
25574 int must_output_die = 0;
25575 bool inlined_func;
25576
25577 /* Ignore blocks that are NULL. */
25578 if (stmt == NULL_TREE)
25579 return;
25580
25581 inlined_func = inlined_function_outer_scope_p (stmt);
25582
25583 /* If the block is one fragment of a non-contiguous block, do not
25584 process the variables, since they will have been done by the
25585 origin block. Do process subblocks. */
25586 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25587 {
25588 tree sub;
25589
25590 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25591 gen_block_die (sub, context_die);
25592
25593 return;
25594 }
25595
25596 /* Determine if we need to output any Dwarf DIEs at all to represent this
25597 block. */
25598 if (inlined_func)
25599 /* The outer scopes for inlinings *must* always be represented. We
25600 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25601 must_output_die = 1;
25602 else
25603 {
25604 /* Determine if this block directly contains any "significant"
25605 local declarations which we will need to output DIEs for. */
25606 if (debug_info_level > DINFO_LEVEL_TERSE)
25607 /* We are not in terse mode so *any* local declaration counts
25608 as being a "significant" one. */
25609 must_output_die = ((BLOCK_VARS (stmt) != NULL
25610 || BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25611 && (TREE_USED (stmt)
25612 || TREE_ASM_WRITTEN (stmt)
25613 || BLOCK_ABSTRACT (stmt)));
25614 else if ((TREE_USED (stmt)
25615 || TREE_ASM_WRITTEN (stmt)
25616 || BLOCK_ABSTRACT (stmt))
25617 && !dwarf2out_ignore_block (stmt))
25618 must_output_die = 1;
25619 }
25620
25621 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25622 DIE for any block which contains no significant local declarations at
25623 all. Rather, in such cases we just call `decls_for_scope' so that any
25624 needed Dwarf info for any sub-blocks will get properly generated. Note
25625 that in terse mode, our definition of what constitutes a "significant"
25626 local declaration gets restricted to include only inlined function
25627 instances and local (nested) function definitions. */
25628 if (must_output_die)
25629 {
25630 if (inlined_func)
25631 {
25632 /* If STMT block is abstract, that means we have been called
25633 indirectly from dwarf2out_abstract_function.
25634 That function rightfully marks the descendant blocks (of
25635 the abstract function it is dealing with) as being abstract,
25636 precisely to prevent us from emitting any
25637 DW_TAG_inlined_subroutine DIE as a descendant
25638 of an abstract function instance. So in that case, we should
25639 not call gen_inlined_subroutine_die.
25640
25641 Later though, when cgraph asks dwarf2out to emit info
25642 for the concrete instance of the function decl into which
25643 the concrete instance of STMT got inlined, the latter will lead
25644 to the generation of a DW_TAG_inlined_subroutine DIE. */
25645 if (! BLOCK_ABSTRACT (stmt))
25646 gen_inlined_subroutine_die (stmt, context_die);
25647 }
25648 else
25649 gen_lexical_block_die (stmt, context_die);
25650 }
25651 else
25652 decls_for_scope (stmt, context_die);
25653 }
25654
25655 /* Process variable DECL (or variable with origin ORIGIN) within
25656 block STMT and add it to CONTEXT_DIE. */
25657 static void
25658 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25659 {
25660 dw_die_ref die;
25661 tree decl_or_origin = decl ? decl : origin;
25662
25663 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25664 die = lookup_decl_die (decl_or_origin);
25665 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25666 {
25667 if (TYPE_DECL_IS_STUB (decl_or_origin))
25668 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25669 else
25670 die = lookup_decl_die (decl_or_origin);
25671 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25672 if (! die && ! early_dwarf)
25673 return;
25674 }
25675 else
25676 die = NULL;
25677
25678 /* Avoid creating DIEs for local typedefs and concrete static variables that
25679 will only be pruned later. */
25680 if ((origin || decl_ultimate_origin (decl))
25681 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25682 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25683 {
25684 origin = decl_ultimate_origin (decl_or_origin);
25685 if (decl && VAR_P (decl) && die != NULL)
25686 {
25687 die = lookup_decl_die (origin);
25688 if (die != NULL)
25689 equate_decl_number_to_die (decl, die);
25690 }
25691 return;
25692 }
25693
25694 if (die != NULL && die->die_parent == NULL)
25695 add_child_die (context_die, die);
25696 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25697 {
25698 if (early_dwarf)
25699 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25700 stmt, context_die);
25701 }
25702 else
25703 {
25704 if (decl && DECL_P (decl))
25705 {
25706 die = lookup_decl_die (decl);
25707
25708 /* Early created DIEs do not have a parent as the decls refer
25709 to the function as DECL_CONTEXT rather than the BLOCK. */
25710 if (die && die->die_parent == NULL)
25711 {
25712 gcc_assert (in_lto_p);
25713 add_child_die (context_die, die);
25714 }
25715 }
25716
25717 gen_decl_die (decl, origin, NULL, context_die);
25718 }
25719 }
25720
25721 /* Generate all of the decls declared within a given scope and (recursively)
25722 all of its sub-blocks. */
25723
25724 static void
25725 decls_for_scope (tree stmt, dw_die_ref context_die)
25726 {
25727 tree decl;
25728 unsigned int i;
25729 tree subblocks;
25730
25731 /* Ignore NULL blocks. */
25732 if (stmt == NULL_TREE)
25733 return;
25734
25735 /* Output the DIEs to represent all of the data objects and typedefs
25736 declared directly within this block but not within any nested
25737 sub-blocks. Also, nested function and tag DIEs have been
25738 generated with a parent of NULL; fix that up now. We don't
25739 have to do this if we're at -g1. */
25740 if (debug_info_level > DINFO_LEVEL_TERSE)
25741 {
25742 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25743 process_scope_var (stmt, decl, NULL_TREE, context_die);
25744 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25745 origin - avoid doing this twice as we have no good way to see
25746 if we've done it once already. */
25747 if (! early_dwarf)
25748 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25749 {
25750 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25751 if (decl == current_function_decl)
25752 /* Ignore declarations of the current function: although they
25753 are declarations, gen_subprogram_die would treat them
25754 as definitions again, because they are equal to
25755 current_function_decl, and endlessly recurse. */;
25756 else if (TREE_CODE (decl) == FUNCTION_DECL)
25757 process_scope_var (stmt, decl, NULL_TREE, context_die);
25758 else
25759 process_scope_var (stmt, NULL_TREE, decl, context_die);
25760 }
25761 }
25762
25763 /* Even if we're at -g1, we need to process the subblocks in order to get
25764 inlined call information. */
25765
25766 /* Output the DIEs to represent all sub-blocks (and the items declared
25767 therein) of this block. */
25768 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25769 subblocks != NULL;
25770 subblocks = BLOCK_CHAIN (subblocks))
25771 gen_block_die (subblocks, context_die);
25772 }
25773
25774 /* Is this a typedef we can avoid emitting? */
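/* For example (a sketch): for

     struct S { };

   the C++ front end injects an artificial member typedef for the class
   name (the injected-class-name), which matches the DECL_ARTIFICIAL case
   below and does not need its own DW_TAG_typedef DIE.  */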
25775
25776 bool
25777 is_redundant_typedef (const_tree decl)
25778 {
25779 if (TYPE_DECL_IS_STUB (decl))
25780 return true;
25781
25782 if (DECL_ARTIFICIAL (decl)
25783 && DECL_CONTEXT (decl)
25784 && is_tagged_type (DECL_CONTEXT (decl))
25785 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25786 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25787 /* Also ignore the artificial member typedef for the class name. */
25788 return true;
25789
25790 return false;
25791 }
25792
25793 /* Return TRUE if TYPE is a typedef that names a type for linkage
25794 purposes. This kind of typedef is produced by the C++ FE for
25795 constructs like:
25796
25797 typedef struct {...} foo;
25798
25799 In that case, there is no typedef variant type produced for foo.
25800 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25801 struct type. */
25802
25803 static bool
25804 is_naming_typedef_decl (const_tree decl)
25805 {
25806 if (decl == NULL_TREE
25807 || TREE_CODE (decl) != TYPE_DECL
25808 || DECL_NAMELESS (decl)
25809 || !is_tagged_type (TREE_TYPE (decl))
25810 || DECL_IS_BUILTIN (decl)
25811 || is_redundant_typedef (decl)
25812 /* It looks like Ada produces TYPE_DECLs that are very similar
25813 to C++ naming typedefs but that have different
25814 semantics. Let's be specific to C++ for now.
25815 || !is_cxx (decl))
25816 return FALSE;
25817
25818 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25819 && TYPE_NAME (TREE_TYPE (decl)) == decl
25820 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25821 != TYPE_NAME (TREE_TYPE (decl))));
25822 }
25823
25824 /* Looks up the DIE for a context. */
25825
25826 static inline dw_die_ref
25827 lookup_context_die (tree context)
25828 {
25829 if (context)
25830 {
25831 /* Find die that represents this context. */
25832 if (TYPE_P (context))
25833 {
25834 context = TYPE_MAIN_VARIANT (context);
25835 dw_die_ref ctx = lookup_type_die (context);
25836 if (!ctx)
25837 return NULL;
25838 return strip_naming_typedef (context, ctx);
25839 }
25840 else
25841 return lookup_decl_die (context);
25842 }
25843 return comp_unit_die ();
25844 }
25845
25846 /* Returns the DIE for a context. */
25847
25848 static inline dw_die_ref
25849 get_context_die (tree context)
25850 {
25851 if (context)
25852 {
25853 /* Find die that represents this context. */
25854 if (TYPE_P (context))
25855 {
25856 context = TYPE_MAIN_VARIANT (context);
25857 return strip_naming_typedef (context, force_type_die (context));
25858 }
25859 else
25860 return force_decl_die (context);
25861 }
25862 return comp_unit_die ();
25863 }
25864
25865 /* Returns the DIE for decl. A DIE will always be returned. */
25866
25867 static dw_die_ref
25868 force_decl_die (tree decl)
25869 {
25870 dw_die_ref decl_die;
25871 unsigned saved_external_flag;
25872 tree save_fn = NULL_TREE;
25873 decl_die = lookup_decl_die (decl);
25874 if (!decl_die)
25875 {
25876 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
25877
25878 decl_die = lookup_decl_die (decl);
25879 if (decl_die)
25880 return decl_die;
25881
25882 switch (TREE_CODE (decl))
25883 {
25884 case FUNCTION_DECL:
25885 /* Clear current_function_decl, so that gen_subprogram_die thinks
25886 that this is a declaration. At this point, we just want to force
25887 a declaration DIE. */
25888 save_fn = current_function_decl;
25889 current_function_decl = NULL_TREE;
25890 gen_subprogram_die (decl, context_die);
25891 current_function_decl = save_fn;
25892 break;
25893
25894 case VAR_DECL:
25895 /* Set external flag to force declaration die. Restore it after
25896 gen_decl_die() call. */
25897 saved_external_flag = DECL_EXTERNAL (decl);
25898 DECL_EXTERNAL (decl) = 1;
25899 gen_decl_die (decl, NULL, NULL, context_die);
25900 DECL_EXTERNAL (decl) = saved_external_flag;
25901 break;
25902
25903 case NAMESPACE_DECL:
25904 if (dwarf_version >= 3 || !dwarf_strict)
25905 dwarf2out_decl (decl);
25906 else
25907 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
25908 decl_die = comp_unit_die ();
25909 break;
25910
25911 case TRANSLATION_UNIT_DECL:
25912 decl_die = comp_unit_die ();
25913 break;
25914
25915 default:
25916 gcc_unreachable ();
25917 }
25918
25919 /* We should be able to find the DIE now. */
25920 if (!decl_die)
25921 decl_die = lookup_decl_die (decl);
25922 gcc_assert (decl_die);
25923 }
25924
25925 return decl_die;
25926 }
25927
25928 /* Returns the DIE for TYPE, which must not be a base type. A DIE is
25929 always returned. */
25930
25931 static dw_die_ref
25932 force_type_die (tree type)
25933 {
25934 dw_die_ref type_die;
25935
25936 type_die = lookup_type_die (type);
25937 if (!type_die)
25938 {
25939 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
25940
25941 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
25942 false, context_die);
25943 gcc_assert (type_die);
25944 }
25945 return type_die;
25946 }
25947
25948 /* Force out any required namespaces to be able to output DECL,
25949 and return the new context_die for it, if it's changed. */
25950
25951 static dw_die_ref
25952 setup_namespace_context (tree thing, dw_die_ref context_die)
25953 {
25954 tree context = (DECL_P (thing)
25955 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
25956 if (context && TREE_CODE (context) == NAMESPACE_DECL)
25957 /* Force out the namespace. */
25958 context_die = force_decl_die (context);
25959
25960 return context_die;
25961 }
25962
25963 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
25964 type) within its namespace, if appropriate.
25965
25966 For compatibility with older debuggers, namespace DIEs only contain
25967 declarations; all definitions are emitted at CU scope, with
25968 DW_AT_specification pointing to the declaration (like with class
25969 members). */
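/* Concretely (a sketch): for

     namespace N { int v; }

   the DW_TAG_namespace DIE for N gets a declaration DIE for v, while the
   defining DW_TAG_variable (carrying the location) is emitted at CU scope
   with a DW_AT_specification that points back to that declaration.  */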
25970
25971 static dw_die_ref
25972 declare_in_namespace (tree thing, dw_die_ref context_die)
25973 {
25974 dw_die_ref ns_context;
25975
25976 if (debug_info_level <= DINFO_LEVEL_TERSE)
25977 return context_die;
25978
25979 /* External declarations in the local scope only need to be emitted
25980 once, not once in the namespace and once in the scope.
25981
25982 This avoids declaring the `extern' below in the
25983 namespace DIE as well as in the innermost scope:
25984
25985 namespace S
25986 {
25987 int i=5;
25988 int foo()
25989 {
25990 int i=8;
25991 extern int i;
25992 return i;
25993 }
25994 }
25995 */
25996 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
25997 return context_die;
25998
25999 /* If this decl is from an inlined function, then don't try to emit it in its
26000 namespace, as we will get confused. It would have already been emitted
26001 when the abstract instance of the inline function was emitted anyway. */
26002 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26003 return context_die;
26004
26005 ns_context = setup_namespace_context (thing, context_die);
26006
26007 if (ns_context != context_die)
26008 {
26009 if (is_fortran ())
26010 return ns_context;
26011 if (DECL_P (thing))
26012 gen_decl_die (thing, NULL, NULL, ns_context);
26013 else
26014 gen_type_die (thing, ns_context);
26015 }
26016 return context_die;
26017 }
26018
26019 /* Generate a DIE for a namespace or namespace alias. */
26020
26021 static void
26022 gen_namespace_die (tree decl, dw_die_ref context_die)
26023 {
26024 dw_die_ref namespace_die;
26025
26026 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26027 they are an alias of. */
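  /* For instance, given

       namespace N { }
       namespace M = N;

     M is such an alias: its DECL_ABSTRACT_ORIGIN is N, so the else branch
     below emits a DW_TAG_imported_declaration for M whose DW_AT_import
     refers to the DW_TAG_namespace DIE of N.  */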
26028 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26029 {
26030 /* Output a real namespace or module. */
26031 context_die = setup_namespace_context (decl, comp_unit_die ());
26032 namespace_die = new_die (is_fortran ()
26033 ? DW_TAG_module : DW_TAG_namespace,
26034 context_die, decl);
26035 /* For Fortran modules defined in different CU don't add src coords. */
26036 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26037 {
26038 const char *name = dwarf2_name (decl, 0);
26039 if (name)
26040 add_name_attribute (namespace_die, name);
26041 }
26042 else
26043 add_name_and_src_coords_attributes (namespace_die, decl);
26044 if (DECL_EXTERNAL (decl))
26045 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26046 equate_decl_number_to_die (decl, namespace_die);
26047 }
26048 else
26049 {
26050 /* Output a namespace alias. */
26051
26052 /* Force out the namespace we are an alias of, if necessary. */
26053 dw_die_ref origin_die
26054 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26055
26056 if (DECL_FILE_SCOPE_P (decl)
26057 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26058 context_die = setup_namespace_context (decl, comp_unit_die ());
26059 /* Now create the namespace alias DIE. */
26060 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26061 add_name_and_src_coords_attributes (namespace_die, decl);
26062 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26063 equate_decl_number_to_die (decl, namespace_die);
26064 }
26065 if ((dwarf_version >= 5 || !dwarf_strict)
26066 && lang_hooks.decls.decl_dwarf_attribute (decl,
26067 DW_AT_export_symbols) == 1)
26068 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26069
26070 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26071 if (want_pubnames ())
26072 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26073 }
26074
26075 /* Generate Dwarf debug information for a decl described by DECL.
26076 The return value is currently only meaningful for PARM_DECLs,
26077 for all other decls it returns NULL.
26078
26079 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26080 It can be NULL otherwise. */
26081
26082 static dw_die_ref
26083 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26084 dw_die_ref context_die)
26085 {
26086 tree decl_or_origin = decl ? decl : origin;
26087 tree class_origin = NULL, ultimate_origin;
26088
26089 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26090 return NULL;
26091
26092 /* Ignore pointer bounds decls. */
26093 if (DECL_P (decl_or_origin)
26094 && TREE_TYPE (decl_or_origin)
26095 && POINTER_BOUNDS_P (decl_or_origin))
26096 return NULL;
26097
26098 switch (TREE_CODE (decl_or_origin))
26099 {
26100 case ERROR_MARK:
26101 break;
26102
26103 case CONST_DECL:
26104 if (!is_fortran () && !is_ada ())
26105 {
26106 /* The individual enumerators of an enum type get output when we output
26107 the Dwarf representation of the relevant enum type itself. */
26108 break;
26109 }
26110
26111 /* Emit its type. */
26112 gen_type_die (TREE_TYPE (decl), context_die);
26113
26114 /* And its containing namespace. */
26115 context_die = declare_in_namespace (decl, context_die);
26116
26117 gen_const_die (decl, context_die);
26118 break;
26119
26120 case FUNCTION_DECL:
26121 #if 0
26122 /* FIXME */
26123 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26124 on local redeclarations of global functions. That seems broken. */
26125 if (current_function_decl != decl)
26126 /* This is only a declaration. */;
26127 #endif
26128
26129 /* We should have abstract copies already and should not generate
26130 stray type DIEs in late LTO dumping. */
26131 if (! early_dwarf)
26132 ;
26133
26134 /* If we're emitting a clone, emit info for the abstract instance. */
26135 else if (origin || DECL_ORIGIN (decl) != decl)
26136 dwarf2out_abstract_function (origin
26137 ? DECL_ORIGIN (origin)
26138 : DECL_ABSTRACT_ORIGIN (decl));
26139
26140 /* If we're emitting a possibly inlined function, emit it as an
26141 abstract instance. */
26142 else if (cgraph_function_possibly_inlined_p (decl)
26143 && ! DECL_ABSTRACT_P (decl)
26144 && ! class_or_namespace_scope_p (context_die)
26145 /* dwarf2out_abstract_function won't emit a die if this is just
26146 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26147 that case, because that works only if we have a die. */
26148 && DECL_INITIAL (decl) != NULL_TREE)
26149 dwarf2out_abstract_function (decl);
26150
26151 /* Otherwise we're emitting the primary DIE for this decl. */
26152 else if (debug_info_level > DINFO_LEVEL_TERSE)
26153 {
26154 /* Before we describe the FUNCTION_DECL itself, make sure that we
26155 have its containing type. */
26156 if (!origin)
26157 origin = decl_class_context (decl);
26158 if (origin != NULL_TREE)
26159 gen_type_die (origin, context_die);
26160
26161 /* And its return type. */
26162 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26163
26164 /* And its virtual context. */
26165 if (DECL_VINDEX (decl) != NULL_TREE)
26166 gen_type_die (DECL_CONTEXT (decl), context_die);
26167
26168 /* Make sure we have a member DIE for decl. */
26169 if (origin != NULL_TREE)
26170 gen_type_die_for_member (origin, decl, context_die);
26171
26172 /* And its containing namespace. */
26173 context_die = declare_in_namespace (decl, context_die);
26174 }
26175
26176 /* Now output a DIE to represent the function itself. */
26177 if (decl)
26178 gen_subprogram_die (decl, context_die);
26179 break;
26180
26181 case TYPE_DECL:
26182 /* If we are in terse mode, don't generate any DIEs to represent any
26183 actual typedefs. */
26184 if (debug_info_level <= DINFO_LEVEL_TERSE)
26185 break;
26186
26187 /* In the special case of a TYPE_DECL node representing the declaration
26188 of some type tag, if the given TYPE_DECL is marked as having been
26189 instantiated from some other (original) TYPE_DECL node (e.g. one which
26190 was generated within the original definition of an inline function) we
26191 used to generate a special (abbreviated) DW_TAG_structure_type,
26192 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26193 should be actually referencing those DIEs, as variable DIEs with that
26194 type would be emitted already in the abstract origin, so it was always
26195 removed during unused type pruning. Don't add anything in this
26196 case. */
26197 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26198 break;
26199
26200 if (is_redundant_typedef (decl))
26201 gen_type_die (TREE_TYPE (decl), context_die);
26202 else
26203 /* Output a DIE to represent the typedef itself. */
26204 gen_typedef_die (decl, context_die);
26205 break;
26206
26207 case LABEL_DECL:
26208 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26209 gen_label_die (decl, context_die);
26210 break;
26211
26212 case VAR_DECL:
26213 case RESULT_DECL:
26214 /* If we are in terse mode, don't generate any DIEs to represent any
26215 variable declarations or definitions. */
26216 if (debug_info_level <= DINFO_LEVEL_TERSE)
26217 break;
26218
26219 /* Avoid generating stray type DIEs during late dwarf dumping.
26220 All types have been dumped early. */
26221 if (early_dwarf
26222 /* ??? But in LTRANS we cannot annotate early created variably
26223 modified type DIEs without copying them and adjusting all
26224 references to them. Dump them again as happens for inlining
26225 which copies both the decl and the types. */
26226 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26227 in VLA bound information for example. */
26228 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26229 current_function_decl)))
26230 {
26231 /* Output any DIEs that are needed to specify the type of this data
26232 object. */
26233 if (decl_by_reference_p (decl_or_origin))
26234 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26235 else
26236 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26237 }
26238
26239 if (early_dwarf)
26240 {
26241 /* And its containing type. */
26242 class_origin = decl_class_context (decl_or_origin);
26243 if (class_origin != NULL_TREE)
26244 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26245
26246 /* And its containing namespace. */
26247 context_die = declare_in_namespace (decl_or_origin, context_die);
26248 }
26249
26250 /* Now output the DIE to represent the data object itself. This gets
26251 complicated because of the possibility that the VAR_DECL really
26252 represents an inlined instance of a formal parameter for an inline
26253 function. */
26254 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26255 if (ultimate_origin != NULL_TREE
26256 && TREE_CODE (ultimate_origin) == PARM_DECL)
26257 gen_formal_parameter_die (decl, origin,
26258 true /* Emit name attribute. */,
26259 context_die);
26260 else
26261 gen_variable_die (decl, origin, context_die);
26262 break;
26263
26264 case FIELD_DECL:
26265 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26266 /* Ignore the nameless fields that are used to skip bits but handle C++
26267 anonymous unions and structs. */
26268 if (DECL_NAME (decl) != NULL_TREE
26269 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26270 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26271 {
26272 gen_type_die (member_declared_type (decl), context_die);
26273 gen_field_die (decl, ctx, context_die);
26274 }
26275 break;
26276
26277 case PARM_DECL:
26278 /* Avoid generating stray type DIEs during late dwarf dumping.
26279 All types have been dumped early. */
26280 if (early_dwarf
26281 /* ??? But in LTRANS we cannot annotate early created variably
26282 modified type DIEs without copying them and adjusting all
26283 references to them. Dump them again as happens for inlining
26284 which copies both the decl and the types. */
26285 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26286 in VLA bound information for example. */
26287 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26288 current_function_decl)))
26289 {
26290 if (DECL_BY_REFERENCE (decl_or_origin))
26291 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26292 else
26293 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26294 }
26295 return gen_formal_parameter_die (decl, origin,
26296 true /* Emit name attribute. */,
26297 context_die);
26298
26299 case NAMESPACE_DECL:
26300 if (dwarf_version >= 3 || !dwarf_strict)
26301 gen_namespace_die (decl, context_die);
26302 break;
26303
26304 case IMPORTED_DECL:
26305 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26306 DECL_CONTEXT (decl), context_die);
26307 break;
26308
26309 case NAMELIST_DECL:
26310 gen_namelist_decl (DECL_NAME (decl), context_die,
26311 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26312 break;
26313
26314 default:
26315 /* Probably some frontend-internal decl. Assume we don't care. */
26316 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26317 break;
26318 }
26319
26320 return NULL;
26321 }
26322 \f
26323 /* Output initial debug information for global DECL. Called at the
26324 end of the parsing process.
26325
26326 This is the initial debug generation process. As such, the DIEs
26327 generated may be incomplete. A later debug generation pass
26328 (dwarf2out_late_global_decl) will augment the information generated
26329 in this pass (e.g., with complete location info). */
26330
26331 static void
26332 dwarf2out_early_global_decl (tree decl)
26333 {
26334 set_early_dwarf s;
26335
26336 /* gen_decl_die() will set DECL_ABSTRACT because
26337 cgraph_function_possibly_inlined_p() returns true. This in
26338 turn will cause DW_AT_inline attributes to be set.
26339
26340 This happens because at early dwarf generation, there is no
26341 cgraph information, causing cgraph_function_possibly_inlined_p()
26342 to return true. Trick cgraph_function_possibly_inlined_p()
26343 while we generate dwarf early. */
26344 bool save = symtab->global_info_ready;
26345 symtab->global_info_ready = true;
26346
26347 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26348 other DECLs and they can point to template types or other things
26349 that dwarf2out can't handle when done via dwarf2out_decl. */
26350 if (TREE_CODE (decl) != TYPE_DECL
26351 && TREE_CODE (decl) != PARM_DECL)
26352 {
26353 if (TREE_CODE (decl) == FUNCTION_DECL)
26354 {
26355 tree save_fndecl = current_function_decl;
26356
26357 /* For nested functions, make sure we have DIEs for the parents first
26358 so that all nested DIEs are generated at the proper scope in the
26359 first shot. */
26360 tree context = decl_function_context (decl);
26361 if (context != NULL)
26362 {
26363 dw_die_ref context_die = lookup_decl_die (context);
26364 current_function_decl = context;
26365
26366 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26367 enough so that it lands in its own context. This avoids type
26368 pruning issues later on. */
26369 if (context_die == NULL || is_declaration_die (context_die))
26370 dwarf2out_decl (context);
26371 }
26372
26373 /* Emit an abstract origin of a function first. This happens
26374 with C++ constructor clones for example and makes
26375 dwarf2out_abstract_function happy which requires the early
26376 DIE of the abstract instance to be present. */
26377 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26378 dw_die_ref origin_die;
26379 if (origin != NULL
26380 /* Do not emit the DIE multiple times but make sure to
26381 process it fully here in case we just saw a declaration. */
26382 && ((origin_die = lookup_decl_die (origin)) == NULL
26383 || is_declaration_die (origin_die)))
26384 {
26385 current_function_decl = origin;
26386 dwarf2out_decl (origin);
26387 }
26388
26389 /* Emit the DIE for decl but avoid doing that multiple times. */
26390 dw_die_ref old_die;
26391 if ((old_die = lookup_decl_die (decl)) == NULL
26392 || is_declaration_die (old_die))
26393 {
26394 current_function_decl = decl;
26395 dwarf2out_decl (decl);
26396 }
26397
26398 current_function_decl = save_fndecl;
26399 }
26400 else
26401 dwarf2out_decl (decl);
26402 }
26403 symtab->global_info_ready = save;
26404 }
26405
26406 /* Return whether EXPR is an expression with the following pattern:
26407 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
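/* For illustration, this is roughly the GENERIC shape of a dereference of a
   compile-time constant address, e.g. a DECL_VALUE_EXPR of the form

     *(int *) 0x1234

   where the conversion of the INTEGER_CST to the pointer type appears as
   the NOP_EXPR (the exact address and type here are only an example).  */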
26408
26409 static bool
26410 is_trivial_indirect_ref (tree expr)
26411 {
26412 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26413 return false;
26414
26415 tree nop = TREE_OPERAND (expr, 0);
26416 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26417 return false;
26418
26419 tree int_cst = TREE_OPERAND (nop, 0);
26420 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26421 }
26422
26423 /* Output debug information for global decl DECL. Called from
26424 toplev.c after compilation proper has finished. */
26425
26426 static void
26427 dwarf2out_late_global_decl (tree decl)
26428 {
26429 /* Fill-in any location information we were unable to determine
26430 on the first pass. */
26431 if (VAR_P (decl) && !POINTER_BOUNDS_P (decl))
26432 {
26433 dw_die_ref die = lookup_decl_die (decl);
26434
26435 /* We may have to generate early debug late for LTO in case debug
26436 was not enabled at compile-time or the target doesn't support
26437 the LTO early debug scheme. */
26438 if (! die && in_lto_p)
26439 {
26440 dwarf2out_decl (decl);
26441 die = lookup_decl_die (decl);
26442 }
26443
26444 if (die)
26445 {
26446 /* We get called via the symtab code invoking late_global_decl
26447 for symbols that are optimized out.
26448
26449 Do not add locations for those, except if they have a
26450 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26451 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26452 INDIRECT_REF expression, as this could generate relocations to
26453 text symbols in LTO object files, which is invalid. */
26454 varpool_node *node = varpool_node::get (decl);
26455 if ((! node || ! node->definition)
26456 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26457 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26458 tree_add_const_value_attribute_for_decl (die, decl);
26459 else
26460 add_location_or_const_value_attribute (die, decl, false);
26461 }
26462 }
26463 }
26464
26465 /* Output debug information for type decl DECL. Called from toplev.c
26466 and from language front ends (to record built-in types). */
26467 static void
26468 dwarf2out_type_decl (tree decl, int local)
26469 {
26470 if (!local)
26471 {
26472 set_early_dwarf s;
26473 dwarf2out_decl (decl);
26474 }
26475 }
26476
26477 /* Output debug information for imported module or decl DECL.
26478 NAME is the non-NULL name in the lexical block if the decl has been renamed.
26479 LEXICAL_BLOCK is the lexical block (whose TREE_CODE is BLOCK)
26480 that DECL belongs to.
26481 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
26482 static void
26483 dwarf2out_imported_module_or_decl_1 (tree decl,
26484 tree name,
26485 tree lexical_block,
26486 dw_die_ref lexical_block_die)
26487 {
26488 expanded_location xloc;
26489 dw_die_ref imported_die = NULL;
26490 dw_die_ref at_import_die;
26491
26492 if (TREE_CODE (decl) == IMPORTED_DECL)
26493 {
26494 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26495 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26496 gcc_assert (decl);
26497 }
26498 else
26499 xloc = expand_location (input_location);
26500
26501 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26502 {
26503 at_import_die = force_type_die (TREE_TYPE (decl));
26504 /* For namespace N { typedef void T; } using N::T; base_type_die
26505 returns NULL, but DW_TAG_imported_declaration requires
26506 the DW_AT_import tag. Force creation of DW_TAG_typedef. */
26507 if (!at_import_die)
26508 {
26509 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26510 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26511 at_import_die = lookup_type_die (TREE_TYPE (decl));
26512 gcc_assert (at_import_die);
26513 }
26514 }
26515 else
26516 {
26517 at_import_die = lookup_decl_die (decl);
26518 if (!at_import_die)
26519 {
26520 /* If we're trying to avoid duplicate debug info, we may not have
26521 emitted the member decl for this field. Emit it now. */
26522 if (TREE_CODE (decl) == FIELD_DECL)
26523 {
26524 tree type = DECL_CONTEXT (decl);
26525
26526 if (TYPE_CONTEXT (type)
26527 && TYPE_P (TYPE_CONTEXT (type))
26528 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26529 DINFO_USAGE_DIR_USE))
26530 return;
26531 gen_type_die_for_member (type, decl,
26532 get_context_die (TYPE_CONTEXT (type)));
26533 }
26534 if (TREE_CODE (decl) == NAMELIST_DECL)
26535 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26536 get_context_die (DECL_CONTEXT (decl)),
26537 NULL_TREE);
26538 else
26539 at_import_die = force_decl_die (decl);
26540 }
26541 }
26542
26543 if (TREE_CODE (decl) == NAMESPACE_DECL)
26544 {
26545 if (dwarf_version >= 3 || !dwarf_strict)
26546 imported_die = new_die (DW_TAG_imported_module,
26547 lexical_block_die,
26548 lexical_block);
26549 else
26550 return;
26551 }
26552 else
26553 imported_die = new_die (DW_TAG_imported_declaration,
26554 lexical_block_die,
26555 lexical_block);
26556
26557 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26558 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26559 if (debug_column_info && xloc.column)
26560 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26561 if (name)
26562 add_AT_string (imported_die, DW_AT_name,
26563 IDENTIFIER_POINTER (name));
26564 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26565 }
26566
26567 /* Output debug information for imported module or decl DECL.
26568 NAME is the non-NULL name in the context if the decl has been renamed.
26569 CHILD is true if decl is one of the renamed decls as part of
26570 importing the whole module.
26571 IMPLICIT is set if this hook is called for an implicit import
26572 such as inline namespace. */
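/* For example, in C++

     using namespace N;   // leads to a DW_TAG_imported_module
     using N::f;          // leads to a DW_TAG_imported_declaration

   each referring to the imported entity through DW_AT_import.  */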
26573
26574 static void
26575 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26576 bool child, bool implicit)
26577 {
26578 /* dw_die_ref at_import_die; */
26579 dw_die_ref scope_die;
26580
26581 if (debug_info_level <= DINFO_LEVEL_TERSE)
26582 return;
26583
26584 gcc_assert (decl);
26585
26586 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace should be
26587 enough; for DWARF4 and older, even if we emit DW_AT_export_symbols as an
26588 extension, add the implicit DW_TAG_imported_module anyway for the benefit
26589 of consumers unaware of DW_AT_export_symbols. */
26590 if (implicit
26591 && dwarf_version >= 5
26592 && lang_hooks.decls.decl_dwarf_attribute (decl,
26593 DW_AT_export_symbols) == 1)
26594 return;
26595
26596 set_early_dwarf s;
26597
26598 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need two
26599 DIEs: the decl DIE for the reference and the scope DIE. First, get the DIE
26600 for the decl itself. */
26601
26602 /* Get the scope die for decl context. Use comp_unit_die for global module
26603 or decl. If a die is not found for non-globals, force a new die. */
26604 if (context
26605 && TYPE_P (context)
26606 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26607 return;
26608
26609 scope_die = get_context_die (context);
26610
26611 if (child)
26612 {
26613 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26614 there is nothing we can do here. */
26615 if (dwarf_version < 3 && dwarf_strict)
26616 return;
26617
26618 gcc_assert (scope_die->die_child);
26619 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26620 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26621 scope_die = scope_die->die_child;
26622 }
26623
26624 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26625 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26626 }
26627
26628 /* Output debug information for namelists. */
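/* For instance, a Fortran declaration such as

     NAMELIST /CASE/ X, Y

   is represented by a DW_TAG_namelist DIE (named after the namelist) whose
   DW_TAG_namelist_item children refer, via DW_AT_namelist_items, to the DIEs
   of X and Y; a use-associated namelist with no item decls instead gets only
   a DW_AT_declaration flag, as handled below.  */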
26629
26630 static dw_die_ref
26631 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26632 {
26633 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26634 tree value;
26635 unsigned i;
26636
26637 if (debug_info_level <= DINFO_LEVEL_TERSE)
26638 return NULL;
26639
26640 gcc_assert (scope_die != NULL);
26641 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26642 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26643
26644 /* If there are no item_decls, we have a nondefining namelist, e.g.
26645 with USE association; hence, set DW_AT_declaration. */
26646 if (item_decls == NULL_TREE)
26647 {
26648 add_AT_flag (nml_die, DW_AT_declaration, 1);
26649 return nml_die;
26650 }
26651
26652 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26653 {
26654 nml_item_ref_die = lookup_decl_die (value);
26655 if (!nml_item_ref_die)
26656 nml_item_ref_die = force_decl_die (value);
26657
26658 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26659 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26660 }
26661 return nml_die;
26662 }
26663
26664
26665 /* Write the debugging output for DECL. */
26666
26667 static void
26668 dwarf2out_decl (tree decl)
26669 {
26670 dw_die_ref context_die = comp_unit_die ();
26671
26672 switch (TREE_CODE (decl))
26673 {
26674 case ERROR_MARK:
26675 return;
26676
26677 case FUNCTION_DECL:
26678 /* If we're a nested function, initially use a parent of NULL; if we're
26679 a plain function, this will be fixed up in decls_for_scope. If
26680 we're a method, it will be ignored, since we already have a DIE. */
26681 if (decl_function_context (decl)
26682 /* But if we're in terse mode, we don't care about scope. */
26683 && debug_info_level > DINFO_LEVEL_TERSE)
26684 context_die = NULL;
26685 break;
26686
26687 case VAR_DECL:
26688 /* For local statics lookup proper context die. */
26689 if (local_function_static (decl))
26690 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26691
26692 /* If we are in terse mode, don't generate any DIEs to represent any
26693 variable declarations or definitions. */
26694 if (debug_info_level <= DINFO_LEVEL_TERSE)
26695 return;
26696 break;
26697
26698 case CONST_DECL:
26699 if (debug_info_level <= DINFO_LEVEL_TERSE)
26700 return;
26701 if (!is_fortran () && !is_ada ())
26702 return;
26703 if (TREE_STATIC (decl) && decl_function_context (decl))
26704 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26705 break;
26706
26707 case NAMESPACE_DECL:
26708 case IMPORTED_DECL:
26709 if (debug_info_level <= DINFO_LEVEL_TERSE)
26710 return;
26711 if (lookup_decl_die (decl) != NULL)
26712 return;
26713 break;
26714
26715 case TYPE_DECL:
26716 /* Don't emit stubs for types unless they are needed by other DIEs. */
26717 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26718 return;
26719
26720 /* Don't bother trying to generate any DIEs to represent any of the
26721 normal built-in types for the language we are compiling. */
26722 if (DECL_IS_BUILTIN (decl))
26723 return;
26724
26725 /* If we are in terse mode, don't generate any DIEs for types. */
26726 if (debug_info_level <= DINFO_LEVEL_TERSE)
26727 return;
26728
26729 /* If we're a function-scope tag, initially use a parent of NULL;
26730 this will be fixed up in decls_for_scope. */
26731 if (decl_function_context (decl))
26732 context_die = NULL;
26733
26734 break;
26735
26736 case NAMELIST_DECL:
26737 break;
26738
26739 default:
26740 return;
26741 }
26742
26743 gen_decl_die (decl, NULL, NULL, context_die);
26744
26745 if (flag_checking)
26746 {
26747 dw_die_ref die = lookup_decl_die (decl);
26748 if (die)
26749 check_die (die);
26750 }
26751 }
26752
26753 /* Write the debugging output for DECL. */
26754
26755 static void
26756 dwarf2out_function_decl (tree decl)
26757 {
26758 dwarf2out_decl (decl);
26759 call_arg_locations = NULL;
26760 call_arg_loc_last = NULL;
26761 call_site_count = -1;
26762 tail_call_site_count = -1;
26763 decl_loc_table->empty ();
26764 cached_dw_loc_list_table->empty ();
26765 }
26766
26767 /* Output a marker (i.e. a label) for the beginning of the generated code for
26768 a lexical block. */
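/* E.g. for block number 3 this emits a label such as .LBB3 (assuming the
   usual "LBB" BLOCK_BEGIN_LABEL prefix and a target-dependent "." local-label
   prefix); the low-pc/ranges of the corresponding DW_TAG_lexical_block DIE
   can then refer to it.  */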
26769
26770 static void
26771 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26772 unsigned int blocknum)
26773 {
26774 switch_to_section (current_function_section ());
26775 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26776 }
26777
26778 /* Output a marker (i.e. a label) for the end of the generated code for a
26779 lexical block. */
26780
26781 static void
26782 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26783 {
26784 switch_to_section (current_function_section ());
26785 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26786 }
26787
26788 /* Returns nonzero if it is appropriate not to emit any debugging
26789 information for BLOCK, because it doesn't contain any instructions.
26790
26791 Don't allow this for blocks with nested functions or local classes
26792 as we would end up with orphans, and in the presence of scheduling
26793 we may end up calling them anyway. */
26794
26795 static bool
26796 dwarf2out_ignore_block (const_tree block)
26797 {
26798 tree decl;
26799 unsigned int i;
26800
26801 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26802 if (TREE_CODE (decl) == FUNCTION_DECL
26803 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26804 return 0;
26805 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26806 {
26807 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26808 if (TREE_CODE (decl) == FUNCTION_DECL
26809 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26810 return 0;
26811 }
26812
26813 return 1;
26814 }
26815
26816 /* Hash table routines for file_hash. */
26817
26818 bool
26819 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26820 {
26821 return filename_cmp (p1->filename, p2) == 0;
26822 }
26823
26824 hashval_t
26825 dwarf_file_hasher::hash (dwarf_file_data *p)
26826 {
26827 return htab_hash_string (p->filename);
26828 }
26829
26830 /* Lookup FILE_NAME (in the list of filenames that we know about here in
26831 dwarf2out.c) and return its "index". The index of each (known) filename is
26832 just a unique number which is associated with only that one filename. We
26833 need such numbers for the sake of generating labels (in the .debug_sfnames
26834 section) and references to those file numbers (in the .debug_srcinfo
26835 and .debug_macinfo sections). If the filename given as an argument is not
26836 found in our current list, add it to the list and assign it the next
26837 available unique index number. */
26838
26839 static struct dwarf_file_data *
26840 lookup_filename (const char *file_name)
26841 {
26842 struct dwarf_file_data * created;
26843
26844 if (!file_name)
26845 return NULL;
26846
26847 dwarf_file_data **slot
26848 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
26849 INSERT);
26850 if (*slot)
26851 return *slot;
26852
26853 created = ggc_alloc<dwarf_file_data> ();
26854 created->filename = file_name;
26855 created->emitted_number = 0;
26856 *slot = created;
26857 return created;
26858 }
26859
26860 /* If the assembler will construct the file table, then translate the compiler
26861 internal file table number into the assembler file table number, and emit
26862 a .file directive if we haven't already emitted one yet. The file table
26863 numbers are different because we prune debug info for unused variables and
26864 types, which may include filenames. */
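/* For example, when the assembler constructs the line table, the first call
   for a given file emits something like

     .file 1 "foo.c"

   (the exact number depends on emission order) and records that number in FD,
   so later references reuse it without emitting another directive.  */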
26865
26866 static int
26867 maybe_emit_file (struct dwarf_file_data * fd)
26868 {
26869 if (! fd->emitted_number)
26870 {
26871 if (last_emitted_file)
26872 fd->emitted_number = last_emitted_file->emitted_number + 1;
26873 else
26874 fd->emitted_number = 1;
26875 last_emitted_file = fd;
26876
26877 if (output_asm_line_debug_info ())
26878 {
26879 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
26880 output_quoted_string (asm_out_file,
26881 remap_debug_filename (fd->filename));
26882 fputc ('\n', asm_out_file);
26883 }
26884 }
26885
26886 return fd->emitted_number;
26887 }
26888
26889 /* Schedule generation of a DW_AT_const_value attribute to DIE.
26890 That generation should happen after function debug info has been
26891 generated. The value of the attribute is the constant value of ARG. */
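/* A sketch of when this matters: for a C++ instantiation such as

     template <int N> struct A { };
     A<3> a;

   the DW_TAG_template_value_parameter DIE for N is scheduled here so that a
   DW_AT_const_value of 3 can be attached once the needed debug info exists.  */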
26892
26893 static void
26894 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
26895 {
26896 die_arg_entry entry;
26897
26898 if (!die || !arg)
26899 return;
26900
26901 gcc_assert (early_dwarf);
26902
26903 if (!tmpl_value_parm_die_table)
26904 vec_alloc (tmpl_value_parm_die_table, 32);
26905
26906 entry.die = die;
26907 entry.arg = arg;
26908 vec_safe_push (tmpl_value_parm_die_table, entry);
26909 }
26910
26911 /* Return TRUE if T is an instance of generic type, FALSE
26912 otherwise. */
26913
26914 static bool
26915 generic_type_p (tree t)
26916 {
26917 if (t == NULL_TREE || !TYPE_P (t))
26918 return false;
26919 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
26920 }
26921
26922 /* Schedule the generation of the generic parameter dies for the
26923 instance of generic type T. The proper generation itself is later
26924 done by gen_scheduled_generic_parms_dies. */
26925
26926 static void
26927 schedule_generic_params_dies_gen (tree t)
26928 {
26929 if (!generic_type_p (t))
26930 return;
26931
26932 gcc_assert (early_dwarf);
26933
26934 if (!generic_type_instances)
26935 vec_alloc (generic_type_instances, 256);
26936
26937 vec_safe_push (generic_type_instances, t);
26938 }
26939
26940 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
26941 by append_entry_to_tmpl_value_parm_die_table. This function must
26942 be called after function DIEs have been generated. */
26943
26944 static void
26945 gen_remaining_tmpl_value_param_die_attribute (void)
26946 {
26947 if (tmpl_value_parm_die_table)
26948 {
26949 unsigned i, j;
26950 die_arg_entry *e;
26951
26952 /* We do this in two phases - first get the cases we can
26953 handle during early-finish, preserving those we cannot
26954 (containing symbolic constants where we don't yet know
26955 whether we are going to output the referenced symbols).
26956 For those we try again at late-finish. */
26957 j = 0;
26958 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
26959 {
26960 if (!e->die->removed
26961 && !tree_add_const_value_attribute (e->die, e->arg))
26962 {
26963 dw_loc_descr_ref loc = NULL;
26964 if (! early_dwarf
26965 && (dwarf_version >= 5 || !dwarf_strict))
26966 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
26967 if (loc)
26968 add_AT_loc (e->die, DW_AT_location, loc);
26969 else
26970 (*tmpl_value_parm_die_table)[j++] = *e;
26971 }
26972 }
26973 tmpl_value_parm_die_table->truncate (j);
26974 }
26975 }
26976
26977 /* Generate generic parameters DIEs for instances of generic types
26978 that have been previously scheduled by
26979 schedule_generic_params_dies_gen. This function must be called
26980 after all the types of the CU have been laid out. */
26981
26982 static void
26983 gen_scheduled_generic_parms_dies (void)
26984 {
26985 unsigned i;
26986 tree t;
26987
26988 if (!generic_type_instances)
26989 return;
26990
26991 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
26992 if (COMPLETE_TYPE_P (t))
26993 gen_generic_params_dies (t);
26994
26995 generic_type_instances = NULL;
26996 }
26997
26998
26999 /* Replace DW_AT_name for the decl with name. */
27000
27001 static void
27002 dwarf2out_set_name (tree decl, tree name)
27003 {
27004 dw_die_ref die;
27005 dw_attr_node *attr;
27006 const char *dname;
27007
27008 die = TYPE_SYMTAB_DIE (decl);
27009 if (!die)
27010 return;
27011
27012 dname = dwarf2_name (name, 0);
27013 if (!dname)
27014 return;
27015
27016 attr = get_AT (die, DW_AT_name);
27017 if (attr)
27018 {
27019 struct indirect_string_node *node;
27020
27021 node = find_AT_string (dname);
27022 /* replace the string. */
27023 attr->dw_attr_val.v.val_str = node;
27024 }
27025
27026 else
27027 add_name_attribute (die, dname);
27028 }
27029
27030 /* True if before or during processing of the first function being emitted. */
27031 static bool in_first_function_p = true;
27032 /* True if loc_note during dwarf2out_var_location call might still be
27033 before first real instruction at address equal to .Ltext0. */
27034 static bool maybe_at_text_label_p = true;
27035 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27036 static unsigned int first_loclabel_num_not_at_text_label;
27037
27038 /* Look ahead for a real insn, or for a begin stmt marker. */
27039
27040 static rtx_insn *
27041 dwarf2out_next_real_insn (rtx_insn *loc_note)
27042 {
27043 rtx_insn *next_real = NEXT_INSN (loc_note);
27044
27045 while (next_real)
27046 if (INSN_P (next_real))
27047 break;
27048 else
27049 next_real = NEXT_INSN (next_real);
27050
27051 return next_real;
27052 }
27053
27054 /* Called by the final INSN scan whenever we see a var location. We
27055 use it to drop labels in the right places, and throw the location in
27056 our lookup table. */
27057
27058 static void
27059 dwarf2out_var_location (rtx_insn *loc_note)
27060 {
27061 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27062 struct var_loc_node *newloc;
27063 rtx_insn *next_real, *next_note;
27064 rtx_insn *call_insn = NULL;
27065 static const char *last_label;
27066 static const char *last_postcall_label;
27067 static bool last_in_cold_section_p;
27068 static rtx_insn *expected_next_loc_note;
27069 tree decl;
27070 bool var_loc_p;
27071 var_loc_view view = 0;
27072
27073 if (!NOTE_P (loc_note))
27074 {
27075 if (CALL_P (loc_note))
27076 {
27077 maybe_reset_location_view (loc_note, cur_line_info_table);
27078 call_site_count++;
27079 if (SIBLING_CALL_P (loc_note))
27080 tail_call_site_count++;
27081 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27082 {
27083 call_insn = loc_note;
27084 loc_note = NULL;
27085 var_loc_p = false;
27086
27087 next_real = dwarf2out_next_real_insn (call_insn);
27088 next_note = NULL;
27089 cached_next_real_insn = NULL;
27090 goto create_label;
27091 }
27092 if (optimize == 0 && !flag_var_tracking)
27093 {
27094 /* When the var-tracking pass is not running, there is no note
27095 for indirect calls whose target is compile-time known. In this
27096 case, process such calls specifically so that we generate call
27097 sites for them anyway. */
27098 rtx x = PATTERN (loc_note);
27099 if (GET_CODE (x) == PARALLEL)
27100 x = XVECEXP (x, 0, 0);
27101 if (GET_CODE (x) == SET)
27102 x = SET_SRC (x);
27103 if (GET_CODE (x) == CALL)
27104 x = XEXP (x, 0);
27105 if (!MEM_P (x)
27106 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27107 || !SYMBOL_REF_DECL (XEXP (x, 0))
27108 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27109 != FUNCTION_DECL))
27110 {
27111 call_insn = loc_note;
27112 loc_note = NULL;
27113 var_loc_p = false;
27114
27115 next_real = dwarf2out_next_real_insn (call_insn);
27116 next_note = NULL;
27117 cached_next_real_insn = NULL;
27118 goto create_label;
27119 }
27120 }
27121 }
27122 else if (!debug_variable_location_views)
27123 gcc_unreachable ();
27124 else
27125 maybe_reset_location_view (loc_note, cur_line_info_table);
27126
27127 return;
27128 }
27129
27130 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27131 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27132 return;
27133
27134 /* Optimize processing a large consecutive sequence of location
27135 notes so we don't spend too much time in next_real_insn. If the
27136 next insn is another location note, remember the next_real_insn
27137 calculation for next time. */
27138 next_real = cached_next_real_insn;
27139 if (next_real)
27140 {
27141 if (expected_next_loc_note != loc_note)
27142 next_real = NULL;
27143 }
27144
27145 next_note = NEXT_INSN (loc_note);
27146 if (! next_note
27147 || next_note->deleted ()
27148 || ! NOTE_P (next_note)
27149 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27150 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27151 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27152 next_note = NULL;
27153
27154 if (! next_real)
27155 next_real = dwarf2out_next_real_insn (loc_note);
27156
27157 if (next_note)
27158 {
27159 expected_next_loc_note = next_note;
27160 cached_next_real_insn = next_real;
27161 }
27162 else
27163 cached_next_real_insn = NULL;
27164
27165 /* If there are no instructions which would be affected by this note,
27166 don't do anything. */
27167 if (var_loc_p
27168 && next_real == NULL_RTX
27169 && !NOTE_DURING_CALL_P (loc_note))
27170 return;
27171
27172 create_label:
27173
27174 if (next_real == NULL_RTX)
27175 next_real = get_last_insn ();
27176
27177 /* If there were any real insns between the note we processed last time
27178 and this note (or if it is the first note), clear
27179 last_{,postcall_}label so that they are not reused this time. */
27180 if (last_var_location_insn == NULL_RTX
27181 || last_var_location_insn != next_real
27182 || last_in_cold_section_p != in_cold_section_p)
27183 {
27184 last_label = NULL;
27185 last_postcall_label = NULL;
27186 }
27187
27188 if (var_loc_p)
27189 {
27190 const char *label
27191 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27192 view = cur_line_info_table->view;
27193 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27194 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27195 if (newloc == NULL)
27196 return;
27197 }
27198 else
27199 {
27200 decl = NULL_TREE;
27201 newloc = NULL;
27202 }
27203
27204 /* If there were no real insns between the note we processed last time
27205 and this note, use the label we emitted last time. Otherwise
27206 create a new label and emit it. */
27207 if (last_label == NULL)
27208 {
27209 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27210 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27211 loclabel_num++;
27212 last_label = ggc_strdup (loclabel);
27213 /* See if loclabel might be equal to .Ltext0. If yes,
27214 bump first_loclabel_num_not_at_text_label. */
27215 if (!have_multiple_function_sections
27216 && in_first_function_p
27217 && maybe_at_text_label_p)
27218 {
27219 static rtx_insn *last_start;
27220 rtx_insn *insn;
27221 for (insn = loc_note; insn; insn = previous_insn (insn))
27222 if (insn == last_start)
27223 break;
27224 else if (!NONDEBUG_INSN_P (insn))
27225 continue;
27226 else
27227 {
27228 rtx body = PATTERN (insn);
27229 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27230 continue;
27231 /* Inline asm could occupy zero bytes. */
27232 else if (GET_CODE (body) == ASM_INPUT
27233 || asm_noperands (body) >= 0)
27234 continue;
27235 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27236 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27237 continue;
27238 #endif
27239 else
27240 {
27241 /* Assume insn has non-zero length. */
27242 maybe_at_text_label_p = false;
27243 break;
27244 }
27245 }
27246 if (maybe_at_text_label_p)
27247 {
27248 last_start = loc_note;
27249 first_loclabel_num_not_at_text_label = loclabel_num;
27250 }
27251 }
27252 }
27253
27254 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27255 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27256
27257 if (!var_loc_p)
27258 {
27259 struct call_arg_loc_node *ca_loc
27260 = ggc_cleared_alloc<call_arg_loc_node> ();
27261 rtx_insn *prev = call_insn;
27262
27263 ca_loc->call_arg_loc_note
27264 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27265 ca_loc->next = NULL;
27266 ca_loc->label = last_label;
27267 gcc_assert (prev
27268 && (CALL_P (prev)
27269 || (NONJUMP_INSN_P (prev)
27270 && GET_CODE (PATTERN (prev)) == SEQUENCE
27271 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27272 if (!CALL_P (prev))
27273 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27274 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27275
27276 /* Look for a SYMBOL_REF in the "prev" instruction. */
27277 rtx x = get_call_rtx_from (PATTERN (prev));
27278 if (x)
27279 {
27280 /* Try to get the call symbol, if any. */
27281 if (MEM_P (XEXP (x, 0)))
27282 x = XEXP (x, 0);
27283 /* First, look for a memory access to a symbol_ref. */
27284 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27285 && SYMBOL_REF_DECL (XEXP (x, 0))
27286 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27287 ca_loc->symbol_ref = XEXP (x, 0);
27288 /* Otherwise, look at a compile-time known user-level function
27289 declaration. */
27290 else if (MEM_P (x)
27291 && MEM_EXPR (x)
27292 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27293 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27294 }
27295
27296 ca_loc->block = insn_scope (prev);
27297 if (call_arg_locations)
27298 call_arg_loc_last->next = ca_loc;
27299 else
27300 call_arg_locations = ca_loc;
27301 call_arg_loc_last = ca_loc;
27302 }
27303 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27304 {
27305 newloc->label = last_label;
27306 newloc->view = view;
27307 }
27308 else
27309 {
27310 if (!last_postcall_label)
27311 {
27312 sprintf (loclabel, "%s-1", last_label);
27313 last_postcall_label = ggc_strdup (loclabel);
27314 }
27315 newloc->label = last_postcall_label;
27316 /* ??? This view is at last_label, not last_label-1, but we
27317 could only assume view at last_label-1 is zero if we could
27318 assume calls always have length greater than one. This is
27319 probably true in general, though there might be a rare
27320 exception to this rule, e.g. if a call insn is optimized out
27321 by target magic. Then, even the -1 in the label will be
27322 wrong, which might invalidate the range. Anyway, using view,
27323 though technically possibly incorrect, will work as far as
27324 ranges go: since L-1 is in the middle of the call insn,
27325 (L-1).0 and (L-1).V shouldn't make any difference, and having
27326 the loclist entry refer to the .loc entry might be useful, so
27327 leave it like this. */
27328 newloc->view = view;
27329 }
27330
27331 if (var_loc_p && flag_debug_asm)
27332 {
27333 const char *name, *sep, *patstr;
27334 if (decl && DECL_NAME (decl))
27335 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27336 else
27337 name = "";
27338 if (NOTE_VAR_LOCATION_LOC (loc_note))
27339 {
27340 sep = " => ";
27341 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27342 }
27343 else
27344 {
27345 sep = " ";
27346 patstr = "RESET";
27347 }
27348 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27349 name, sep, patstr);
27350 }
27351
27352 last_var_location_insn = next_real;
27353 last_in_cold_section_p = in_cold_section_p;
27354 }
27355
27356 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27357 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27358 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27359 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27360 BLOCK_FRAGMENT_ORIGIN links. */
27361 static bool
27362 block_within_block_p (tree block, tree outer, bool bothways)
27363 {
27364 if (block == outer)
27365 return true;
27366
27367 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27368 for (tree context = BLOCK_SUPERCONTEXT (block);
27369 context != outer;
27370 context = BLOCK_SUPERCONTEXT (context))
27371 if (!context || TREE_CODE (context) != BLOCK)
27372 return false;
27373
27374 if (!bothways)
27375 return true;
27376
27377 /* Now check that each block is actually referenced by its
27378 parent. */
27379 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27380 context = BLOCK_SUPERCONTEXT (context))
27381 {
27382 if (BLOCK_FRAGMENT_ORIGIN (context))
27383 {
27384 gcc_assert (!BLOCK_SUBBLOCKS (context));
27385 context = BLOCK_FRAGMENT_ORIGIN (context);
27386 }
27387 for (tree sub = BLOCK_SUBBLOCKS (context);
27388 sub != block;
27389 sub = BLOCK_CHAIN (sub))
27390 if (!sub)
27391 return false;
27392 if (context == outer)
27393 return true;
27394 else
27395 block = context;
27396 }
27397 }
27398
27399 /* Called during final while assembling the marker of the entry point
27400 for an inlined function. */
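/* A sketch of the intent: the label emitted here is later used to give the
   corresponding DW_TAG_inlined_subroutine DIE its entry-point address (and
   view); see the uses of inline_entry_data elsewhere in this file.  */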
27401
27402 static void
27403 dwarf2out_inline_entry (tree block)
27404 {
27405 gcc_assert (debug_inline_points);
27406
27407 /* If we can't represent it, don't bother. */
27408 if (!(dwarf_version >= 3 || !dwarf_strict))
27409 return;
27410
27411 gcc_assert (DECL_P (block_ultimate_origin (block)));
27412
27413 /* Sanity check the block tree. This would catch a case in which
27414 BLOCK got removed from the tree reachable from the outermost
27415 lexical block, but got retained in markers. It would still link
27416 back to its parents, but some ancestor would be missing a link
27417 down the path to the sub BLOCK. If the block got removed, its
27418 BLOCK_NUMBER will not be a usable value. */
27419 if (flag_checking)
27420 gcc_assert (block_within_block_p (block,
27421 DECL_INITIAL (current_function_decl),
27422 true));
27423
27424 gcc_assert (inlined_function_outer_scope_p (block));
27425 gcc_assert (!BLOCK_DIE (block));
27426
27427 if (BLOCK_FRAGMENT_ORIGIN (block))
27428 block = BLOCK_FRAGMENT_ORIGIN (block);
27429 /* Can the entry point ever not be at the beginning of an
27430 unfragmented lexical block? */
27431 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27432 || (cur_line_info_table
27433 && !ZERO_VIEW_P (cur_line_info_table->view))))
27434 return;
27435
27436 if (!inline_entry_data_table)
27437 inline_entry_data_table
27438 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27439
27440
27441 inline_entry_data **iedp
27442 = inline_entry_data_table->find_slot_with_hash (block,
27443 htab_hash_pointer (block),
27444 INSERT);
27445 if (*iedp)
27446 /* ??? Ideally, we'd record all entry points for the same inlined
27447 function (some may have been duplicated by e.g. unrolling), but
27448 we have no way to represent that ATM. */
27449 return;
27450
27451 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27452 ied->block = block;
27453 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27454 ied->label_num = BLOCK_NUMBER (block);
27455 if (cur_line_info_table)
27456 ied->view = cur_line_info_table->view;
27457
27458 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27459
27460 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_INLINE_ENTRY_LABEL,
27461 BLOCK_NUMBER (block));
27462 ASM_OUTPUT_LABEL (asm_out_file, label);
27463 }
27464
27465 /* Called from finalize_size_functions for size functions so that their body
27466 can be encoded in the debug info to describe the layout of variable-length
27467 structures. */
27468
27469 static void
27470 dwarf2out_size_function (tree decl)
27471 {
27472 function_to_dwarf_procedure (decl);
27473 }
27474
27475 /* Note in one location list that text section has changed. */
27476
27477 int
27478 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27479 {
27480 var_loc_list *list = *slot;
27481 if (list->first)
27482 list->last_before_switch
27483 = list->last->next ? list->last->next : list->last;
27484 return 1;
27485 }
27486
27487 /* Note in all location lists that text section has changed. */
27488
27489 static void
27490 var_location_switch_text_section (void)
27491 {
27492 if (decl_loc_table == NULL)
27493 return;
27494
27495 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27496 }
27497
27498 /* Create a new line number table. */
27499
27500 static dw_line_info_table *
27501 new_line_info_table (void)
27502 {
27503 dw_line_info_table *table;
27504
27505 table = ggc_cleared_alloc<dw_line_info_table> ();
27506 table->file_num = 1;
27507 table->line_num = 1;
27508 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27509 FORCE_RESET_NEXT_VIEW (table->view);
27510 table->symviews_since_reset = 0;
27511
27512 return table;
27513 }
27514
27515 /* Look up the "current" table into which we emit line info, so
27516    that we don't have to do it for every source line. */
27515 /* Look up the "current" table into which we emit line info, so
27516    that we don't have to do it for every source line. */
27517
27518 static void
27519 set_cur_line_info_table (section *sec)
27520 {
27521 dw_line_info_table *table;
27522
27523 if (sec == text_section)
27524 table = text_section_line_info;
27525 else if (sec == cold_text_section)
27526 {
27527 table = cold_text_section_line_info;
27528 if (!table)
27529 {
27530 cold_text_section_line_info = table = new_line_info_table ();
27531 table->end_label = cold_end_label;
27532 }
27533 }
27534 else
27535 {
27536 const char *end_label;
27537
27538 if (crtl->has_bb_partition)
27539 {
27540 if (in_cold_section_p)
27541 end_label = crtl->subsections.cold_section_end_label;
27542 else
27543 end_label = crtl->subsections.hot_section_end_label;
27544 }
27545 else
27546 {
27547 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27548 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27549 current_function_funcdef_no);
27550 end_label = ggc_strdup (label);
27551 }
27552
27553 table = new_line_info_table ();
27554 table->end_label = end_label;
27555
27556 vec_safe_push (separate_line_info, table);
27557 }
27558
27559 if (output_asm_line_debug_info ())
27560 table->is_stmt = (cur_line_info_table
27561 ? cur_line_info_table->is_stmt
27562 : DWARF_LINE_DEFAULT_IS_STMT_START);
27563 cur_line_info_table = table;
27564 }
27565
27566
27567 /* We need to reset the locations at the beginning of each
27568 function. We can't do this in the end_function hook, because the
27569 declarations that use the locations won't have been output when
27570 that hook is called. Also compute have_multiple_function_sections here. */
27571
27572 static void
27573 dwarf2out_begin_function (tree fun)
27574 {
27575 section *sec = function_section (fun);
27576
27577 if (sec != text_section)
27578 have_multiple_function_sections = true;
27579
27580 if (crtl->has_bb_partition && !cold_text_section)
27581 {
27582 gcc_assert (current_function_decl == fun);
27583 cold_text_section = unlikely_text_section ();
27584 switch_to_section (cold_text_section);
27585 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27586 switch_to_section (sec);
27587 }
27588
27589 dwarf2out_note_section_used ();
27590 call_site_count = 0;
27591 tail_call_site_count = 0;
27592
27593 set_cur_line_info_table (sec);
27594 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27595 }
27596
27597 /* Helper function of dwarf2out_end_function, called only after emitting
27598 the very first function into assembly. Check if some .debug_loc range
27599 might end with a .LVL* label that could be equal to .Ltext0.
27600 In that case we must force using absolute addresses in .debug_loc ranges,
27601 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27602 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27603 list terminator.
27604 Set have_multiple_function_sections to true in that case and
27605 terminate htab traversal. */
27606
27607 int
27608 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27609 {
27610 var_loc_list *entry = *slot;
27611 struct var_loc_node *node;
27612
27613 node = entry->first;
27614 if (node && node->next && node->next->label)
27615 {
27616 unsigned int i;
27617 const char *label = node->next->label;
27618 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27619
27620 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27621 {
27622 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27623 if (strcmp (label, loclabel) == 0)
27624 {
27625 have_multiple_function_sections = true;
27626 return 0;
27627 }
27628 }
27629 }
27630 return 1;
27631 }
27632
27633 /* Hook called after emitting a function into assembly.
27634 This does something only for the very first function emitted. */
27635
27636 static void
27637 dwarf2out_end_function (unsigned int)
27638 {
27639 if (in_first_function_p
27640 && !have_multiple_function_sections
27641 && first_loclabel_num_not_at_text_label
27642 && decl_loc_table)
27643 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27644 in_first_function_p = false;
27645 maybe_at_text_label_p = false;
27646 }
27647
27648 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27649 front-ends register a translation unit even before dwarf2out_init is
27650 called. */
27651 static tree main_translation_unit = NULL_TREE;
27652
27653 /* Hook called by front-ends after they built their main translation unit.
27654 Associate comp_unit_die to UNIT. */
27655
27656 static void
27657 dwarf2out_register_main_translation_unit (tree unit)
27658 {
27659 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27660 && main_translation_unit == NULL_TREE);
27661 main_translation_unit = unit;
27662 /* If dwarf2out_init has not been called yet, it will perform the association
27663 itself looking at main_translation_unit. */
27664 if (decl_die_table != NULL)
27665 equate_decl_number_to_die (unit, comp_unit_die ());
27666 }
27667
27668 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27669
27670 static void
27671 push_dw_line_info_entry (dw_line_info_table *table,
27672 enum dw_line_info_opcode opcode, unsigned int val)
27673 {
27674 dw_line_info_entry e;
27675 e.opcode = opcode;
27676 e.val = val;
27677 vec_safe_push (table->entries, e);
27678 }
27679
27680 /* Output a label to mark the beginning of a source code line entry
27681 and record information relating to this source line, in
27682 'line_info_table' for later output of the .debug_line section. */
27683 /* ??? The discriminator parameter ought to be unsigned. */
27684
27685 static void
27686 dwarf2out_source_line (unsigned int line, unsigned int column,
27687 const char *filename,
27688 int discriminator, bool is_stmt)
27689 {
27690 unsigned int file_num;
27691 dw_line_info_table *table;
27692 static var_loc_view lvugid;
27693
27694 if (debug_info_level < DINFO_LEVEL_TERSE)
27695 return;
27696
27697 table = cur_line_info_table;
27698
27699 if (line == 0)
27700 {
27701 if (debug_variable_location_views
27702 && output_asm_line_debug_info ()
27703 && table && !RESETTING_VIEW_P (table->view))
27704 {
27705 /* If we're using the assembler to compute view numbers, we
27706 can't issue a .loc directive for line zero, so we can't
27707 get a view number at this point. We might attempt to
27708 compute it from the previous view, or equate it to a
27709 subsequent view (though it might not be there!), but
27710 since we're omitting the line number entry, we might as
27711 well omit the view number as well. That means pretending
27712 it's a view number zero, which might very well turn out
27713 to be correct. ??? Extend the assembler so that the
27714 compiler could emit e.g. ".locview .LVU#", to output a
27715 view without changing line number information. We'd then
27716 have to count it in symviews_since_reset; when it's omitted,
27717 it doesn't count. */
27718 if (!zero_view_p)
27719 zero_view_p = BITMAP_GGC_ALLOC ();
27720 bitmap_set_bit (zero_view_p, table->view);
27721 if (flag_debug_asm)
27722 {
27723 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27724 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27725 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27726 ASM_COMMENT_START);
27727 assemble_name (asm_out_file, label);
27728 putc ('\n', asm_out_file);
27729 }
27730 table->view = ++lvugid;
27731 }
27732 return;
27733 }
27734
27735   /* The discriminator column was added in DWARF 4.  Simplify the code
27736      below by removing it when we're not supposed to output it. */
27737 if (dwarf_version < 4 && dwarf_strict)
27738 discriminator = 0;
27739
27740 if (!debug_column_info)
27741 column = 0;
27742
27743 file_num = maybe_emit_file (lookup_filename (filename));
27744
27745 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27746 the debugger has used the second (possibly duplicate) line number
27747 at the beginning of the function to mark the end of the prologue.
27748 We could eliminate any other duplicates within the function. For
27749 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27750 that second line number entry. */
27751 /* Recall that this end-of-prologue indication is *not* the same thing
27752 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27753 to which the hook corresponds, follows the last insn that was
27754 emitted by gen_prologue. What we need is to precede the first insn
27755 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27756 insn that corresponds to something the user wrote. These may be
27757 very different locations once scheduling is enabled. */
27758
27759 if (0 && file_num == table->file_num
27760 && line == table->line_num
27761 && column == table->column_num
27762 && discriminator == table->discrim_num
27763 && is_stmt == table->is_stmt)
27764 return;
27765
27766 switch_to_section (current_function_section ());
27767
27768 /* If requested, emit something human-readable. */
27769 if (flag_debug_asm)
27770 {
27771 if (debug_column_info)
27772 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27773 filename, line, column);
27774 else
27775 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27776 filename, line);
27777 }
27778
27779 if (output_asm_line_debug_info ())
27780 {
27781 /* Emit the .loc directive understood by GNU as. */
27782 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
27783 file_num, line, is_stmt, discriminator */
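      /* For example (illustrative values), a statement at file 1, line 25,
	 column 12 with discriminator 1 and a symbolic view label might come
	 out as:
	   .loc 1 25 12 is_stmt 0 discriminator 1 view .LVU3
	 where the trailing " view ..." part is only emitted when location
	 views (-gvariable-location-views) are enabled.  */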
27784 fputs ("\t.loc ", asm_out_file);
27785 fprint_ul (asm_out_file, file_num);
27786 putc (' ', asm_out_file);
27787 fprint_ul (asm_out_file, line);
27788 putc (' ', asm_out_file);
27789 fprint_ul (asm_out_file, column);
27790
27791 if (is_stmt != table->is_stmt)
27792 {
27793 fputs (" is_stmt ", asm_out_file);
27794 putc (is_stmt ? '1' : '0', asm_out_file);
27795 }
27796 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
27797 {
27798 gcc_assert (discriminator > 0);
27799 fputs (" discriminator ", asm_out_file);
27800 fprint_ul (asm_out_file, (unsigned long) discriminator);
27801 }
27802 if (debug_variable_location_views)
27803 {
27804 if (!RESETTING_VIEW_P (table->view))
27805 {
27806 table->symviews_since_reset++;
27807 if (table->symviews_since_reset > symview_upper_bound)
27808 symview_upper_bound = table->symviews_since_reset;
27809 /* When we're using the assembler to compute view
27810 numbers, we output symbolic labels after "view" in
27811 .loc directives, and the assembler will set them for
27812 us, so that we can refer to the view numbers in
27813 location lists. The only exceptions are when we know
27814 a view will be zero: "-0" is a forced reset, used
27815 e.g. in the beginning of functions, whereas "0" tells
27816 the assembler to check that there was a PC change
27817 since the previous view, in a way that implicitly
27818 resets the next view. */
27819 fputs (" view ", asm_out_file);
27820 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27821 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27822 assemble_name (asm_out_file, label);
27823 table->view = ++lvugid;
27824 }
27825 else
27826 {
27827 table->symviews_since_reset = 0;
27828 if (FORCE_RESETTING_VIEW_P (table->view))
27829 fputs (" view -0", asm_out_file);
27830 else
27831 fputs (" view 0", asm_out_file);
27832 /* Mark the present view as a zero view. Earlier debug
27833 binds may have already added its id to loclists to be
27834 emitted later, so we can't reuse the id for something
27835 else. However, it's good to know whether a view is
27836 known to be zero, because then we may be able to
27837 optimize out locviews that are all zeros, so take
27838 note of it in zero_view_p. */
27839 if (!zero_view_p)
27840 zero_view_p = BITMAP_GGC_ALLOC ();
27841 bitmap_set_bit (zero_view_p, lvugid);
27842 table->view = ++lvugid;
27843 }
27844 }
27845 putc ('\n', asm_out_file);
27846 }
27847 else
27848 {
27849 unsigned int label_num = ++line_info_label_num;
27850
27851 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
27852
27853 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
27854 push_dw_line_info_entry (table, LI_adv_address, label_num);
27855 else
27856 push_dw_line_info_entry (table, LI_set_address, label_num);
27857 if (debug_variable_location_views)
27858 {
27859 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
27860 if (resetting)
27861 table->view = 0;
27862
27863 if (flag_debug_asm)
27864 fprintf (asm_out_file, "\t%s view %s%d\n",
27865 ASM_COMMENT_START,
27866 resetting ? "-" : "",
27867 table->view);
27868
27869 table->view++;
27870 }
27871 if (file_num != table->file_num)
27872 push_dw_line_info_entry (table, LI_set_file, file_num);
27873 if (discriminator != table->discrim_num)
27874 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
27875 if (is_stmt != table->is_stmt)
27876 push_dw_line_info_entry (table, LI_negate_stmt, 0);
27877 push_dw_line_info_entry (table, LI_set_line, line);
27878 if (debug_column_info)
27879 push_dw_line_info_entry (table, LI_set_column, column);
27880 }
27881
27882 table->file_num = file_num;
27883 table->line_num = line;
27884 table->column_num = column;
27885 table->discrim_num = discriminator;
27886 table->is_stmt = is_stmt;
27887 table->in_use = true;
27888 }
27889
27890 /* Record the beginning of a new source file. */
27891
27892 static void
27893 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
27894 {
27895 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27896 {
27897 macinfo_entry e;
27898 e.code = DW_MACINFO_start_file;
27899 e.lineno = lineno;
27900 e.info = ggc_strdup (filename);
27901 vec_safe_push (macinfo_table, e);
27902 }
27903 }
27904
27905 /* Record the end of a source file. */
27906
27907 static void
27908 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
27909 {
27910 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27911 {
27912 macinfo_entry e;
27913 e.code = DW_MACINFO_end_file;
27914 e.lineno = lineno;
27915 e.info = NULL;
27916 vec_safe_push (macinfo_table, e);
27917 }
27918 }
27919
27920 /* Called from debug_define in toplev.c.  The `buffer' parameter contains
27921    the tail part of the directive line, i.e. the part which is past the
27922    initial whitespace, '#', whitespace, directive name and the following
      whitespace. */
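/* For example (illustrative): for a directive "#define FOO(x) ((x) + 1)"
   seen on line 12, BUFFER would be "FOO(x) ((x) + 1)" and the recorded
   macinfo entry gets code DW_MACINFO_define and lineno 12.  */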
27923
27924 static void
27925 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
27926 const char *buffer ATTRIBUTE_UNUSED)
27927 {
27928 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27929 {
27930 macinfo_entry e;
27931 /* Insert a dummy first entry to be able to optimize the whole
27932 predefined macro block using DW_MACRO_import. */
27933 if (macinfo_table->is_empty () && lineno <= 1)
27934 {
27935 e.code = 0;
27936 e.lineno = 0;
27937 e.info = NULL;
27938 vec_safe_push (macinfo_table, e);
27939 }
27940 e.code = DW_MACINFO_define;
27941 e.lineno = lineno;
27942 e.info = ggc_strdup (buffer);
27943 vec_safe_push (macinfo_table, e);
27944 }
27945 }
27946
27947 /* Called from debug_undef in toplev.c.  The `buffer' parameter contains
27948    the tail part of the directive line, i.e. the part which is past the
27949    initial whitespace, '#', whitespace, directive name and the following
      whitespace. */
27950
27951 static void
27952 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
27953 const char *buffer ATTRIBUTE_UNUSED)
27954 {
27955 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27956 {
27957 macinfo_entry e;
27958 /* Insert a dummy first entry to be able to optimize the whole
27959 predefined macro block using DW_MACRO_import. */
27960 if (macinfo_table->is_empty () && lineno <= 1)
27961 {
27962 e.code = 0;
27963 e.lineno = 0;
27964 e.info = NULL;
27965 vec_safe_push (macinfo_table, e);
27966 }
27967 e.code = DW_MACINFO_undef;
27968 e.lineno = lineno;
27969 e.info = ggc_strdup (buffer);
27970 vec_safe_push (macinfo_table, e);
27971 }
27972 }
27973
27974 /* Helpers to manipulate the hash table of macinfo entries. */
27975
27976 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
27977 {
27978 static inline hashval_t hash (const macinfo_entry *);
27979 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
27980 };
27981
27982 inline hashval_t
27983 macinfo_entry_hasher::hash (const macinfo_entry *entry)
27984 {
27985 return htab_hash_string (entry->info);
27986 }
27987
27988 inline bool
27989 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
27990 const macinfo_entry *entry2)
27991 {
27992 return !strcmp (entry1->info, entry2->info);
27993 }
27994
27995 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
27996
27997 /* Output a single .debug_macinfo entry. */
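/* For instance (illustrative), a DW_MACINFO_define for "FOO 1" at line 3 is
   emitted as the opcode byte 0x01, the uleb128 line number 3 and the
   NUL-terminated string "FOO 1"; the _strp variants instead emit an offset
   into .debug_str (or a uleb128 string index) rather than the inline
   string.  */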
27998
27999 static void
28000 output_macinfo_op (macinfo_entry *ref)
28001 {
28002 int file_num;
28003 size_t len;
28004 struct indirect_string_node *node;
28005 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28006 struct dwarf_file_data *fd;
28007
28008 switch (ref->code)
28009 {
28010 case DW_MACINFO_start_file:
28011 fd = lookup_filename (ref->info);
28012 file_num = maybe_emit_file (fd);
28013 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28014 dw2_asm_output_data_uleb128 (ref->lineno,
28015 "Included from line number %lu",
28016 (unsigned long) ref->lineno);
28017 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28018 break;
28019 case DW_MACINFO_end_file:
28020 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28021 break;
28022 case DW_MACINFO_define:
28023 case DW_MACINFO_undef:
28024 len = strlen (ref->info) + 1;
28025 if (!dwarf_strict
28026 && len > DWARF_OFFSET_SIZE
28027 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28028 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28029 {
28030 ref->code = ref->code == DW_MACINFO_define
28031 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28032 output_macinfo_op (ref);
28033 return;
28034 }
28035 dw2_asm_output_data (1, ref->code,
28036 ref->code == DW_MACINFO_define
28037 ? "Define macro" : "Undefine macro");
28038 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28039 (unsigned long) ref->lineno);
28040 dw2_asm_output_nstring (ref->info, -1, "The macro");
28041 break;
28042 case DW_MACRO_define_strp:
28043 case DW_MACRO_undef_strp:
28044 node = find_AT_string (ref->info);
28045 gcc_assert (node
28046 && (node->form == DW_FORM_strp
28047 || node->form == DW_FORM_GNU_str_index));
28048 dw2_asm_output_data (1, ref->code,
28049 ref->code == DW_MACRO_define_strp
28050 ? "Define macro strp"
28051 : "Undefine macro strp");
28052 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28053 (unsigned long) ref->lineno);
28054 if (node->form == DW_FORM_strp)
28055 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28056 debug_str_section, "The macro: \"%s\"",
28057 ref->info);
28058 else
28059 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28060 ref->info);
28061 break;
28062 case DW_MACRO_import:
28063 dw2_asm_output_data (1, ref->code, "Import");
28064 ASM_GENERATE_INTERNAL_LABEL (label,
28065 DEBUG_MACRO_SECTION_LABEL,
28066 ref->lineno + macinfo_label_base);
28067 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28068 break;
28069 default:
28070 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28071 ASM_COMMENT_START, (unsigned long) ref->code);
28072 break;
28073 }
28074 }
28075
28076 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28077    other compilation unit .debug_macinfo sections.  IDX is the index of
28078    the first define/undef op in the candidate range.  On success, return
28079    the number of ops that should be emitted in a comdat .debug_macinfo
28080    section and emit a DW_MACRO_import entry referencing it.
28081    If the define/undef entry should be emitted normally, return 0. */
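/* As a sketch of the intent: if several CUs include the same header and see
   an identical run of #define/#undef ops, each CU emits just one
   DW_MACRO_import of a comdat .debug_macinfo group whose name encodes the
   header basename, the starting line number and an md5 of the ops, so the
   linker keeps a single copy of the shared run.  */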
28082
28083 static unsigned
28084 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28085 macinfo_hash_type **macinfo_htab)
28086 {
28087 macinfo_entry *first, *second, *cur, *inc;
28088 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28089 unsigned char checksum[16];
28090 struct md5_ctx ctx;
28091 char *grp_name, *tail;
28092 const char *base;
28093 unsigned int i, count, encoded_filename_len, linebuf_len;
28094 macinfo_entry **slot;
28095
28096 first = &(*macinfo_table)[idx];
28097 second = &(*macinfo_table)[idx + 1];
28098
28099 /* Optimize only if there are at least two consecutive define/undef ops,
28100 and either all of them are before first DW_MACINFO_start_file
28101 with lineno {0,1} (i.e. predefined macro block), or all of them are
28102 in some included header file. */
28103 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28104 return 0;
28105 if (vec_safe_is_empty (files))
28106 {
28107 if (first->lineno > 1 || second->lineno > 1)
28108 return 0;
28109 }
28110 else if (first->lineno == 0)
28111 return 0;
28112
28113 /* Find the last define/undef entry that can be grouped together
28114 with first and at the same time compute md5 checksum of their
28115 codes, linenumbers and strings. */
28116 md5_init_ctx (&ctx);
28117 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28118 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28119 break;
28120 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28121 break;
28122 else
28123 {
28124 unsigned char code = cur->code;
28125 md5_process_bytes (&code, 1, &ctx);
28126 checksum_uleb128 (cur->lineno, &ctx);
28127 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28128 }
28129 md5_finish_ctx (&ctx, checksum);
28130 count = i - idx;
28131
28132 /* From the containing include filename (if any) pick up just
28133 usable characters from its basename. */
28134 if (vec_safe_is_empty (files))
28135 base = "";
28136 else
28137 base = lbasename (files->last ().info);
28138 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28139 if (ISIDNUM (base[i]) || base[i] == '.')
28140 encoded_filename_len++;
28141   /* Count the '.' separator appended after the encoded filename. */
28142 if (encoded_filename_len)
28143 encoded_filename_len++;
28144
28145 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28146 linebuf_len = strlen (linebuf);
28147
28148 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
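  /* E.g. (hypothetical values) "wm4.stdio.h.34.<32 hex digits>" for a 4-byte
     DWARF offset size, a run starting at line 34 of stdio.h, and the md5
     checksum printed as 32 hex digits.  */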
28149 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28150 + 16 * 2 + 1);
28151 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28152 tail = grp_name + 4;
28153 if (encoded_filename_len)
28154 {
28155 for (i = 0; base[i]; i++)
28156 if (ISIDNUM (base[i]) || base[i] == '.')
28157 *tail++ = base[i];
28158 *tail++ = '.';
28159 }
28160 memcpy (tail, linebuf, linebuf_len);
28161 tail += linebuf_len;
28162 *tail++ = '.';
28163 for (i = 0; i < 16; i++)
28164 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28165
28166 /* Construct a macinfo_entry for DW_MACRO_import
28167 in the empty vector entry before the first define/undef. */
28168 inc = &(*macinfo_table)[idx - 1];
28169 inc->code = DW_MACRO_import;
28170 inc->lineno = 0;
28171 inc->info = ggc_strdup (grp_name);
28172 if (!*macinfo_htab)
28173 *macinfo_htab = new macinfo_hash_type (10);
28174 /* Avoid emitting duplicates. */
28175 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28176 if (*slot != NULL)
28177 {
28178 inc->code = 0;
28179 inc->info = NULL;
28180 /* If such an entry has been used before, just emit
28181 a DW_MACRO_import op. */
28182 inc = *slot;
28183 output_macinfo_op (inc);
28184 /* And clear all macinfo_entry in the range to avoid emitting them
28185 in the second pass. */
28186 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28187 {
28188 cur->code = 0;
28189 cur->info = NULL;
28190 }
28191 }
28192 else
28193 {
28194 *slot = inc;
28195 inc->lineno = (*macinfo_htab)->elements ();
28196 output_macinfo_op (inc);
28197 }
28198 return count;
28199 }
28200
28201 /* Save any strings needed by the macinfo table in the debug str
28202 table. All strings must be collected into the table by the time
28203 index_string is called. */
28204
28205 static void
28206 save_macinfo_strings (void)
28207 {
28208 unsigned len;
28209 unsigned i;
28210 macinfo_entry *ref;
28211
28212 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28213 {
28214 switch (ref->code)
28215 {
28216 /* Match the logic in output_macinfo_op to decide on
28217 indirect strings. */
28218 case DW_MACINFO_define:
28219 case DW_MACINFO_undef:
28220 len = strlen (ref->info) + 1;
28221 if (!dwarf_strict
28222 && len > DWARF_OFFSET_SIZE
28223 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28224 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28225 set_indirect_string (find_AT_string (ref->info));
28226 break;
28227 case DW_MACRO_define_strp:
28228 case DW_MACRO_undef_strp:
28229 set_indirect_string (find_AT_string (ref->info));
28230 break;
28231 default:
28232 break;
28233 }
28234 }
28235 }
28236
28237 /* Output macinfo section(s). */
28238
28239 static void
28240 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28241 {
28242 unsigned i;
28243 unsigned long length = vec_safe_length (macinfo_table);
28244 macinfo_entry *ref;
28245 vec<macinfo_entry, va_gc> *files = NULL;
28246 macinfo_hash_type *macinfo_htab = NULL;
28247 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28248
28249 if (! length)
28250 return;
28251
28252 /* output_macinfo* uses these interchangeably. */
28253 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28254 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28255 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28256 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28257
28258 /* AIX Assembler inserts the length, so adjust the reference to match the
28259 offset expected by debuggers. */
28260 strcpy (dl_section_ref, debug_line_label);
28261 if (XCOFF_DEBUGGING_INFO)
28262 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28263
28264 /* For .debug_macro emit the section header. */
28265 if (!dwarf_strict || dwarf_version >= 5)
28266 {
28267 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28268 "DWARF macro version number");
28269 if (DWARF_OFFSET_SIZE == 8)
28270 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28271 else
28272 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28273 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28274 debug_line_section, NULL);
28275 }
28276
28277   /* In the first loop, output_macinfo emits the primary .debug_macinfo
28278      section and clears each macinfo_entry after it has been emitted.
28279      If a longer range of define/undef ops can be optimized using
28280      DW_MACRO_import, the DW_MACRO_import op is emitted and kept in the
28281      vector entry before the first define/undef in the range, while the
28282      define/undef ops themselves are not emitted here but are kept. */
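  /* The net effect (a sketch): a shared run of defines shows up in this
     primary section only as its DW_MACRO_import op, and the second loop
     further below re-emits the kept define/undef ops inside the comdat
     section that the import points at.  */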
28283 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28284 {
28285 switch (ref->code)
28286 {
28287 case DW_MACINFO_start_file:
28288 vec_safe_push (files, *ref);
28289 break;
28290 case DW_MACINFO_end_file:
28291 if (!vec_safe_is_empty (files))
28292 files->pop ();
28293 break;
28294 case DW_MACINFO_define:
28295 case DW_MACINFO_undef:
28296 if ((!dwarf_strict || dwarf_version >= 5)
28297 && HAVE_COMDAT_GROUP
28298 && vec_safe_length (files) != 1
28299 && i > 0
28300 && i + 1 < length
28301 && (*macinfo_table)[i - 1].code == 0)
28302 {
28303 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28304 if (count)
28305 {
28306 i += count - 1;
28307 continue;
28308 }
28309 }
28310 break;
28311 case 0:
28312 /* A dummy entry may be inserted at the beginning to be able
28313 to optimize the whole block of predefined macros. */
28314 if (i == 0)
28315 continue;
28316 default:
28317 break;
28318 }
28319 output_macinfo_op (ref);
28320 ref->info = NULL;
28321 ref->code = 0;
28322 }
28323
28324 if (!macinfo_htab)
28325 return;
28326
28327 /* Save the number of transparent includes so we can adjust the
28328 label number for the fat LTO object DWARF. */
28329 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28330
28331 delete macinfo_htab;
28332 macinfo_htab = NULL;
28333
28334   /* If any DW_MACRO_import ops were used, then at each such entry
28335      terminate the current chain, switch to a new comdat .debug_macinfo
28336      section, and emit the kept define/undef entries within it. */
28337 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28338 switch (ref->code)
28339 {
28340 case 0:
28341 continue;
28342 case DW_MACRO_import:
28343 {
28344 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28345 tree comdat_key = get_identifier (ref->info);
28346 /* Terminate the previous .debug_macinfo section. */
28347 dw2_asm_output_data (1, 0, "End compilation unit");
28348 targetm.asm_out.named_section (debug_macinfo_section_name,
28349 SECTION_DEBUG
28350 | SECTION_LINKONCE
28351 | (early_lto_debug
28352 ? SECTION_EXCLUDE : 0),
28353 comdat_key);
28354 ASM_GENERATE_INTERNAL_LABEL (label,
28355 DEBUG_MACRO_SECTION_LABEL,
28356 ref->lineno + macinfo_label_base);
28357 ASM_OUTPUT_LABEL (asm_out_file, label);
28358 ref->code = 0;
28359 ref->info = NULL;
28360 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28361 "DWARF macro version number");
28362 if (DWARF_OFFSET_SIZE == 8)
28363 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28364 else
28365 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28366 }
28367 break;
28368 case DW_MACINFO_define:
28369 case DW_MACINFO_undef:
28370 output_macinfo_op (ref);
28371 ref->code = 0;
28372 ref->info = NULL;
28373 break;
28374 default:
28375 gcc_unreachable ();
28376 }
28377
28378 macinfo_label_base += macinfo_label_base_adj;
28379 }
28380
28381 /* Initialize the various sections and labels for dwarf output; when
28382    EARLY_LTO_DEBUG is set, use the early LTO debug variants of the
28383    sections.  Returns the generation, a zero-based count incremented on
      each call. */
28384
28385 static unsigned
28386 init_sections_and_labels (bool early_lto_debug)
28387 {
28388 /* As we may get called multiple times have a generation count for
28389 labels. */
28390 static unsigned generation = 0;
28391
28392 if (early_lto_debug)
28393 {
28394 if (!dwarf_split_debug_info)
28395 {
28396 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28397 SECTION_DEBUG | SECTION_EXCLUDE,
28398 NULL);
28399 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28400 SECTION_DEBUG | SECTION_EXCLUDE,
28401 NULL);
28402 debug_macinfo_section_name
28403 = ((dwarf_strict && dwarf_version < 5)
28404 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28405 debug_macinfo_section = get_section (debug_macinfo_section_name,
28406 SECTION_DEBUG
28407 | SECTION_EXCLUDE, NULL);
28408 /* For macro info we have to refer to a debug_line section, so
28409 similar to split-dwarf emit a skeleton one for early debug. */
28410 debug_skeleton_line_section
28411 = get_section (DEBUG_LTO_LINE_SECTION,
28412 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28413 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28414 DEBUG_SKELETON_LINE_SECTION_LABEL,
28415 generation);
28416 }
28417 else
28418 {
28419 /* ??? Which of the following do we need early? */
28420 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28421 SECTION_DEBUG | SECTION_EXCLUDE,
28422 NULL);
28423 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28424 SECTION_DEBUG | SECTION_EXCLUDE,
28425 NULL);
28426 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28427 SECTION_DEBUG
28428 | SECTION_EXCLUDE, NULL);
28429 debug_skeleton_abbrev_section
28430 = get_section (DEBUG_LTO_ABBREV_SECTION,
28431 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28432 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28433 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28434 generation);
28435
28436 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28437 stay in the main .o, but the skeleton_line goes into the split
28438 off dwo. */
28439 debug_skeleton_line_section
28440 = get_section (DEBUG_LTO_LINE_SECTION,
28441 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28442 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28443 DEBUG_SKELETON_LINE_SECTION_LABEL,
28444 generation);
28445 debug_str_offsets_section
28446 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28447 SECTION_DEBUG | SECTION_EXCLUDE,
28448 NULL);
28449 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28450 DEBUG_SKELETON_INFO_SECTION_LABEL,
28451 generation);
28452 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28453 DEBUG_STR_DWO_SECTION_FLAGS,
28454 NULL);
28455 debug_macinfo_section_name
28456 = ((dwarf_strict && dwarf_version < 5)
28457 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28458 debug_macinfo_section = get_section (debug_macinfo_section_name,
28459 SECTION_DEBUG | SECTION_EXCLUDE,
28460 NULL);
28461 }
28462 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28463 DEBUG_STR_SECTION_FLAGS
28464 | SECTION_EXCLUDE, NULL);
28465 if (!dwarf_split_debug_info && !dwarf2out_as_loc_support)
28466 debug_line_str_section
28467 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28468 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28469 }
28470 else
28471 {
28472 if (!dwarf_split_debug_info)
28473 {
28474 debug_info_section = get_section (DEBUG_INFO_SECTION,
28475 SECTION_DEBUG, NULL);
28476 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28477 SECTION_DEBUG, NULL);
28478 debug_loc_section = get_section (dwarf_version >= 5
28479 ? DEBUG_LOCLISTS_SECTION
28480 : DEBUG_LOC_SECTION,
28481 SECTION_DEBUG, NULL);
28482 debug_macinfo_section_name
28483 = ((dwarf_strict && dwarf_version < 5)
28484 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28485 debug_macinfo_section = get_section (debug_macinfo_section_name,
28486 SECTION_DEBUG, NULL);
28487 }
28488 else
28489 {
28490 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28491 SECTION_DEBUG | SECTION_EXCLUDE,
28492 NULL);
28493 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28494 SECTION_DEBUG | SECTION_EXCLUDE,
28495 NULL);
28496 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28497 SECTION_DEBUG, NULL);
28498 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28499 SECTION_DEBUG, NULL);
28500 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28501 SECTION_DEBUG, NULL);
28502 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28503 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28504 generation);
28505
28506 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28507 stay in the main .o, but the skeleton_line goes into the
28508 split off dwo. */
28509 debug_skeleton_line_section
28510 = get_section (DEBUG_DWO_LINE_SECTION,
28511 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28512 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28513 DEBUG_SKELETON_LINE_SECTION_LABEL,
28514 generation);
28515 debug_str_offsets_section
28516 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28517 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28518 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28519 DEBUG_SKELETON_INFO_SECTION_LABEL,
28520 generation);
28521 debug_loc_section = get_section (dwarf_version >= 5
28522 ? DEBUG_DWO_LOCLISTS_SECTION
28523 : DEBUG_DWO_LOC_SECTION,
28524 SECTION_DEBUG | SECTION_EXCLUDE,
28525 NULL);
28526 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28527 DEBUG_STR_DWO_SECTION_FLAGS,
28528 NULL);
28529 debug_macinfo_section_name
28530 = ((dwarf_strict && dwarf_version < 5)
28531 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28532 debug_macinfo_section = get_section (debug_macinfo_section_name,
28533 SECTION_DEBUG | SECTION_EXCLUDE,
28534 NULL);
28535 }
28536 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28537 SECTION_DEBUG, NULL);
28538 debug_line_section = get_section (DEBUG_LINE_SECTION,
28539 SECTION_DEBUG, NULL);
28540 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28541 SECTION_DEBUG, NULL);
28542 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28543 SECTION_DEBUG, NULL);
28544 debug_str_section = get_section (DEBUG_STR_SECTION,
28545 DEBUG_STR_SECTION_FLAGS, NULL);
28546 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28547 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28548 DEBUG_STR_SECTION_FLAGS, NULL);
28549
28550 debug_ranges_section = get_section (dwarf_version >= 5
28551 ? DEBUG_RNGLISTS_SECTION
28552 : DEBUG_RANGES_SECTION,
28553 SECTION_DEBUG, NULL);
28554 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28555 SECTION_DEBUG, NULL);
28556 }
28557
28558 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28559 DEBUG_ABBREV_SECTION_LABEL, generation);
28560 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28561 DEBUG_INFO_SECTION_LABEL, generation);
28562 info_section_emitted = false;
28563 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28564 DEBUG_LINE_SECTION_LABEL, generation);
28565 /* There are up to 4 unique ranges labels per generation.
28566 See also output_rnglists. */
28567 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28568 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28569 if (dwarf_version >= 5 && dwarf_split_debug_info)
28570 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28571 DEBUG_RANGES_SECTION_LABEL,
28572 1 + generation * 4);
28573 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28574 DEBUG_ADDR_SECTION_LABEL, generation);
28575 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28576 (dwarf_strict && dwarf_version < 5)
28577 ? DEBUG_MACINFO_SECTION_LABEL
28578 : DEBUG_MACRO_SECTION_LABEL, generation);
28579 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28580 generation);
28581
28582 ++generation;
28583 return generation - 1;
28584 }
28585
28586 /* Set up for Dwarf output at the start of compilation. */
28587
28588 static void
28589 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28590 {
28591 /* Allocate the file_table. */
28592 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28593
28594 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28595 /* Allocate the decl_die_table. */
28596 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28597
28598 /* Allocate the decl_loc_table. */
28599 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28600
28601 /* Allocate the cached_dw_loc_list_table. */
28602 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28603
28604 /* Allocate the initial hunk of the decl_scope_table. */
28605 vec_alloc (decl_scope_table, 256);
28606
28607 /* Allocate the initial hunk of the abbrev_die_table. */
28608 vec_alloc (abbrev_die_table, 256);
28609 /* Zero-th entry is allocated, but unused. */
28610 abbrev_die_table->quick_push (NULL);
28611
28612 /* Allocate the dwarf_proc_stack_usage_map. */
28613 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28614
28615 /* Allocate the pubtypes and pubnames vectors. */
28616 vec_alloc (pubname_table, 32);
28617 vec_alloc (pubtype_table, 32);
28618
28619 vec_alloc (incomplete_types, 64);
28620
28621 vec_alloc (used_rtx_array, 32);
28622
28623 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28624 vec_alloc (macinfo_table, 64);
28625 #endif
28626
28627 /* If front-ends already registered a main translation unit but we were not
28628 ready to perform the association, do this now. */
28629 if (main_translation_unit != NULL_TREE)
28630 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28631 }
28632
28633 /* Called before compile () starts outputting functions, variables
28634 and toplevel asms into assembly. */
28635
28636 static void
28637 dwarf2out_assembly_start (void)
28638 {
28639 if (text_section_line_info)
28640 return;
28641
28642 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28643 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28644 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28645 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28646 COLD_TEXT_SECTION_LABEL, 0);
28647 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28648
28649 switch_to_section (text_section);
28650 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28651 #endif
28652
28653 /* Make sure the line number table for .text always exists. */
28654 text_section_line_info = new_line_info_table ();
28655 text_section_line_info->end_label = text_end_label;
28656
28657 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28658 cur_line_info_table = text_section_line_info;
28659 #endif
28660
28661 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28662 && dwarf2out_do_cfi_asm ()
28663 && !dwarf2out_do_eh_frame ())
28664 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28665 }
28666
28667 /* A helper function for dwarf2out_finish called through
28668 htab_traverse. Assign a string its index. All strings must be
28669 collected into the table by the time index_string is called,
28670 because the indexing code relies on htab_traverse to traverse nodes
28671 in the same order for each run. */
28672
28673 int
28674 index_string (indirect_string_node **h, unsigned int *index)
28675 {
28676 indirect_string_node *node = *h;
28677
28678 find_string_form (node);
28679 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
28680 {
28681 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28682 node->index = *index;
28683 *index += 1;
28684 }
28685 return 1;
28686 }
28687
28688 /* A helper function for output_indirect_strings called through
28689 htab_traverse. Output the offset to a string and update the
28690 current offset. */
28691
28692 int
28693 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28694 {
28695 indirect_string_node *node = *h;
28696
28697 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
28698 {
28699 /* Assert that this node has been assigned an index. */
28700 gcc_assert (node->index != NO_INDEX_ASSIGNED
28701 && node->index != NOT_INDEXED);
28702 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28703 "indexed string 0x%x: %s", node->index, node->str);
28704 *offset += strlen (node->str) + 1;
28705 }
28706 return 1;
28707 }
28708
28709 /* A helper function for dwarf2out_finish called through
28710 htab_traverse. Output the indexed string. */
28711
28712 int
28713 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28714 {
28715 struct indirect_string_node *node = *h;
28716
28717 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
28718 {
28719 /* Assert that the strings are output in the same order as their
28720 indexes were assigned. */
28721 gcc_assert (*cur_idx == node->index);
28722 assemble_string (node->str, strlen (node->str) + 1);
28723 *cur_idx += 1;
28724 }
28725 return 1;
28726 }
28727
28728 /* A helper function for dwarf2out_finish called through
28729 htab_traverse. Emit one queued .debug_str string. */
28730
28731 int
28732 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28733 {
28734 struct indirect_string_node *node = *h;
28735
28736 node->form = find_string_form (node);
28737 if (node->form == form && node->refcount > 0)
28738 {
28739 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28740 assemble_string (node->str, strlen (node->str) + 1);
28741 }
28742
28743 return 1;
28744 }
28745
28746 /* Output the indexed string table. */
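/* Roughly speaking, in split-DWARF mode this means: DW_FORM_strp strings go
   into .debug_str for the skeleton CU, while DW_FORM_GNU_str_index strings
   get a table of offsets in .debug_str_offsets and their bodies in
   .debug_str.dwo, in the index order assigned by index_string.  */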
28747
28748 static void
28749 output_indirect_strings (void)
28750 {
28751 switch_to_section (debug_str_section);
28752 if (!dwarf_split_debug_info)
28753 debug_str_hash->traverse<enum dwarf_form,
28754 output_indirect_string> (DW_FORM_strp);
28755 else
28756 {
28757 unsigned int offset = 0;
28758 unsigned int cur_idx = 0;
28759
28760 if (skeleton_debug_str_hash)
28761 skeleton_debug_str_hash->traverse<enum dwarf_form,
28762 output_indirect_string> (DW_FORM_strp);
28763
28764 switch_to_section (debug_str_offsets_section);
28765 debug_str_hash->traverse_noresize
28766 <unsigned int *, output_index_string_offset> (&offset);
28767 switch_to_section (debug_str_dwo_section);
28768 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
28769 (&cur_idx);
28770 }
28771 }
28772
28773 /* Callback for htab_traverse to assign an index to an entry in the
28774 table, and to write that entry to the .debug_addr section. */
28775
28776 int
28777 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
28778 {
28779 addr_table_entry *entry = *slot;
28780
28781 if (entry->refcount == 0)
28782 {
28783 gcc_assert (entry->index == NO_INDEX_ASSIGNED
28784 || entry->index == NOT_INDEXED);
28785 return 1;
28786 }
28787
28788 gcc_assert (entry->index == *cur_index);
28789 (*cur_index)++;
28790
28791 switch (entry->kind)
28792 {
28793 case ate_kind_rtx:
28794 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
28795 "0x%x", entry->index);
28796 break;
28797 case ate_kind_rtx_dtprel:
28798 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
28799 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
28800 DWARF2_ADDR_SIZE,
28801 entry->addr.rtl);
28802 fputc ('\n', asm_out_file);
28803 break;
28804 case ate_kind_label:
28805 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
28806 "0x%x", entry->index);
28807 break;
28808 default:
28809 gcc_unreachable ();
28810 }
28811 return 1;
28812 }
28813
28814 /* Produce the .debug_addr section. */
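/* Each referenced address is written once, in index order, so DIEs can
   refer to it via a small uleb128 index (e.g. DW_FORM_GNU_addr_index)
   instead of a full relocated address; see output_addr_table_entry above
   for the three entry kinds handled (illustrative summary).  */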
28815
28816 static void
28817 output_addr_table (void)
28818 {
28819 unsigned int index = 0;
28820 if (addr_index_table == NULL || addr_index_table->size () == 0)
28821 return;
28822
28823 switch_to_section (debug_addr_section);
28824 addr_index_table
28825 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
28826 }
28827
28828 #if ENABLE_ASSERT_CHECKING
28829 /* Verify that all marks are clear. */
28830
28831 static void
28832 verify_marks_clear (dw_die_ref die)
28833 {
28834 dw_die_ref c;
28835
28836 gcc_assert (! die->die_mark);
28837 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
28838 }
28839 #endif /* ENABLE_ASSERT_CHECKING */
28840
28841 /* Clear the marks for a die and its children.
28842    Do nothing if the mark isn't already set. */
28843
28844 static void
28845 prune_unmark_dies (dw_die_ref die)
28846 {
28847 dw_die_ref c;
28848
28849 if (die->die_mark)
28850 die->die_mark = 0;
28851 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
28852 }
28853
28854 /* Given LOC that is referenced by a DIE we're marking as used, find all
28855    the DWARF procedures it references and mark them as used as well. */
28856
28857 static void
28858 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
28859 {
28860 for (; loc != NULL; loc = loc->dw_loc_next)
28861 switch (loc->dw_loc_opc)
28862 {
28863 case DW_OP_implicit_pointer:
28864 case DW_OP_convert:
28865 case DW_OP_reinterpret:
28866 case DW_OP_GNU_implicit_pointer:
28867 case DW_OP_GNU_convert:
28868 case DW_OP_GNU_reinterpret:
28869 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
28870 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28871 break;
28872 case DW_OP_GNU_variable_value:
28873 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28874 {
28875 dw_die_ref ref
28876 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
28877 if (ref == NULL)
28878 break;
28879 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28880 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28881 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28882 }
28883 /* FALLTHRU */
28884 case DW_OP_call2:
28885 case DW_OP_call4:
28886 case DW_OP_call_ref:
28887 case DW_OP_const_type:
28888 case DW_OP_GNU_const_type:
28889 case DW_OP_GNU_parameter_ref:
28890 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
28891 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28892 break;
28893 case DW_OP_regval_type:
28894 case DW_OP_deref_type:
28895 case DW_OP_GNU_regval_type:
28896 case DW_OP_GNU_deref_type:
28897 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
28898 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
28899 break;
28900 case DW_OP_entry_value:
28901 case DW_OP_GNU_entry_value:
28902 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
28903 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
28904 break;
28905 default:
28906 break;
28907 }
28908 }
28909
28910 /* Given DIE that we're marking as used, find any other dies
28911 it references as attributes and mark them as used. */
28912
28913 static void
28914 prune_unused_types_walk_attribs (dw_die_ref die)
28915 {
28916 dw_attr_node *a;
28917 unsigned ix;
28918
28919 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
28920 {
28921 switch (AT_class (a))
28922 {
28923 /* Make sure DWARF procedures referenced by location descriptions will
28924 get emitted. */
28925 case dw_val_class_loc:
28926 prune_unused_types_walk_loc_descr (AT_loc (a));
28927 break;
28928 case dw_val_class_loc_list:
28929 for (dw_loc_list_ref list = AT_loc_list (a);
28930 list != NULL;
28931 list = list->dw_loc_next)
28932 prune_unused_types_walk_loc_descr (list->expr);
28933 break;
28934
28935 case dw_val_class_view_list:
28936 /* This points to a loc_list in another attribute, so it's
28937 already covered. */
28938 break;
28939
28940 case dw_val_class_die_ref:
28941 /* A reference to another DIE.
28942 Make sure that it will get emitted.
28943 If it was broken out into a comdat group, don't follow it. */
28944 if (! AT_ref (a)->comdat_type_p
28945 || a->dw_attr == DW_AT_specification)
28946 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
28947 break;
28948
28949 case dw_val_class_str:
28950 /* Set the string's refcount to 0 so that prune_unused_types_mark
28951 accounts properly for it. */
28952 a->dw_attr_val.v.val_str->refcount = 0;
28953 break;
28954
28955 default:
28956 break;
28957 }
28958 }
28959 }
28960
28961 /* Mark the children DIEs of DIE that describe its generic parameters
      and arguments. */
28962
28963 static void
28964 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
28965 {
28966 dw_die_ref c;
28967
28968 if (die == NULL || die->die_child == NULL)
28969 return;
28970 c = die->die_child;
28971 do
28972 {
28973 if (is_template_parameter (c))
28974 prune_unused_types_mark (c, 1);
28975 c = c->die_sib;
28976 } while (c && c != die->die_child);
28977 }
28978
28979 /* Mark DIE as being used. If DOKIDS is true, then walk down
28980 to DIE's children. */
28981
28982 static void
28983 prune_unused_types_mark (dw_die_ref die, int dokids)
28984 {
28985 dw_die_ref c;
28986
28987 if (die->die_mark == 0)
28988 {
28989 /* We haven't done this node yet. Mark it as used. */
28990 die->die_mark = 1;
28991 /* If this is the DIE of a generic type instantiation,
28992 mark the children DIEs that describe its generic parms and
28993 args. */
28994 prune_unused_types_mark_generic_parms_dies (die);
28995
28996 /* We also have to mark its parents as used.
28997 (But we don't want to mark our parent's kids due to this,
28998 unless it is a class.) */
28999 if (die->die_parent)
29000 prune_unused_types_mark (die->die_parent,
29001 class_scope_p (die->die_parent));
29002
29003 /* Mark any referenced nodes. */
29004 prune_unused_types_walk_attribs (die);
29005
29006 /* If this node is a specification,
29007 also mark the definition, if it exists. */
29008 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29009 prune_unused_types_mark (die->die_definition, 1);
29010 }
29011
29012 if (dokids && die->die_mark != 2)
29013 {
29014 /* We need to walk the children, but haven't done so yet.
29015 Remember that we've walked the kids. */
29016 die->die_mark = 2;
29017
29018 /* If this is an array type, we need to make sure our
29019 kids get marked, even if they're types. If we're
29020 breaking out types into comdat sections, do this
29021 for all type definitions. */
29022 if (die->die_tag == DW_TAG_array_type
29023 || (use_debug_types
29024 && is_type_die (die) && ! is_declaration_die (die)))
29025 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29026 else
29027 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29028 }
29029 }
29030
29031 /* For local classes, check whether any static member functions were
29032    emitted and, if so, mark them. */
29033
29034 static void
29035 prune_unused_types_walk_local_classes (dw_die_ref die)
29036 {
29037 dw_die_ref c;
29038
29039 if (die->die_mark == 2)
29040 return;
29041
29042 switch (die->die_tag)
29043 {
29044 case DW_TAG_structure_type:
29045 case DW_TAG_union_type:
29046 case DW_TAG_class_type:
29047 break;
29048
29049 case DW_TAG_subprogram:
29050 if (!get_AT_flag (die, DW_AT_declaration)
29051 || die->die_definition != NULL)
29052 prune_unused_types_mark (die, 1);
29053 return;
29054
29055 default:
29056 return;
29057 }
29058
29059 /* Mark children. */
29060 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29061 }
29062
29063 /* Walk the tree DIE and mark types that we actually use. */
29064
29065 static void
29066 prune_unused_types_walk (dw_die_ref die)
29067 {
29068 dw_die_ref c;
29069
29070 /* Don't do anything if this node is already marked and
29071 children have been marked as well. */
29072 if (die->die_mark == 2)
29073 return;
29074
29075 switch (die->die_tag)
29076 {
29077 case DW_TAG_structure_type:
29078 case DW_TAG_union_type:
29079 case DW_TAG_class_type:
29080 if (die->die_perennial_p)
29081 break;
29082
29083 for (c = die->die_parent; c; c = c->die_parent)
29084 if (c->die_tag == DW_TAG_subprogram)
29085 break;
29086
29087 /* Finding used static member functions inside of classes
29088 is needed just for local classes, because for other classes
29089 static member function DIEs with DW_AT_specification
29090 are emitted outside of the DW_TAG_*_type. If we ever change
29091 it, we'd need to call this even for non-local classes. */
29092 if (c)
29093 prune_unused_types_walk_local_classes (die);
29094
29095 /* It's a type node --- don't mark it. */
29096 return;
29097
29098 case DW_TAG_const_type:
29099 case DW_TAG_packed_type:
29100 case DW_TAG_pointer_type:
29101 case DW_TAG_reference_type:
29102 case DW_TAG_rvalue_reference_type:
29103 case DW_TAG_volatile_type:
29104 case DW_TAG_typedef:
29105 case DW_TAG_array_type:
29106 case DW_TAG_interface_type:
29107 case DW_TAG_friend:
29108 case DW_TAG_enumeration_type:
29109 case DW_TAG_subroutine_type:
29110 case DW_TAG_string_type:
29111 case DW_TAG_set_type:
29112 case DW_TAG_subrange_type:
29113 case DW_TAG_ptr_to_member_type:
29114 case DW_TAG_file_type:
29115 /* Type nodes are useful only when other DIEs reference them --- don't
29116 mark them. */
29117 /* FALLTHROUGH */
29118
29119 case DW_TAG_dwarf_procedure:
29120 /* Likewise for DWARF procedures. */
29121
29122 if (die->die_perennial_p)
29123 break;
29124
29125 return;
29126
29127 default:
29128 /* Mark everything else. */
29129 break;
29130 }
29131
29132 if (die->die_mark == 0)
29133 {
29134 die->die_mark = 1;
29135
29136 /* Now, mark any dies referenced from here. */
29137 prune_unused_types_walk_attribs (die);
29138 }
29139
29140 die->die_mark = 2;
29141
29142 /* Mark children. */
29143 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29144 }
29145
29146 /* Increment the string counts on strings referred to from DIE's
29147 attributes. */
29148
29149 static void
29150 prune_unused_types_update_strings (dw_die_ref die)
29151 {
29152 dw_attr_node *a;
29153 unsigned ix;
29154
29155 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29156 if (AT_class (a) == dw_val_class_str)
29157 {
29158 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29159 s->refcount++;
29160 /* Avoid unnecessarily putting strings that are used less than
29161 twice in the hash table. */
29162 if (s->refcount
29163 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29164 {
29165 indirect_string_node **slot
29166 = debug_str_hash->find_slot_with_hash (s->str,
29167 htab_hash_string (s->str),
29168 INSERT);
29169 gcc_assert (*slot == NULL);
29170 *slot = s;
29171 }
29172 }
29173 }
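/* Editor's note -- illustrative sketch, not part of the original source.
   The refcounts collected here feed the later choice of string form: a
   string referenced at least twice (or at least once when the debug string
   section supports SECTION_MERGE) is entered into debug_str_hash exactly
   when it crosses that threshold, and such pooled strings can then be
   emitted indirectly in .debug_str rather than inline in each DIE.  */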
29174
29175 /* Mark DIE and its children as removed. */
29176
29177 static void
29178 mark_removed (dw_die_ref die)
29179 {
29180 dw_die_ref c;
29181 die->removed = true;
29182 FOR_EACH_CHILD (die, c, mark_removed (c));
29183 }
29184
29185 /* Remove from the tree DIE any dies that aren't marked. */
29186
29187 static void
29188 prune_unused_types_prune (dw_die_ref die)
29189 {
29190 dw_die_ref c;
29191
29192 gcc_assert (die->die_mark);
29193 prune_unused_types_update_strings (die);
29194
29195 if (! die->die_child)
29196 return;
29197
29198 c = die->die_child;
29199 do {
29200 dw_die_ref prev = c, next;
29201 for (c = c->die_sib; ! c->die_mark; c = next)
29202 if (c == die->die_child)
29203 {
29204 /* No marked children between 'prev' and the end of the list. */
29205 if (prev == c)
29206 /* No marked children at all. */
29207 die->die_child = NULL;
29208 else
29209 {
29210 prev->die_sib = c->die_sib;
29211 die->die_child = prev;
29212 }
29213 c->die_sib = NULL;
29214 mark_removed (c);
29215 return;
29216 }
29217 else
29218 {
29219 next = c->die_sib;
29220 c->die_sib = NULL;
29221 mark_removed (c);
29222 }
29223
29224 if (c != prev->die_sib)
29225 prev->die_sib = c;
29226 prune_unused_types_prune (c);
29227 } while (c != die->die_child);
29228 }
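/* Editor's note -- illustrative sketch, not part of the original source.
   DIE children form a circular singly linked list: DIE->die_child points
   to the LAST child and each child's die_sib points to the next child, so
   last->die_sib is the first child.  Example run of the pruning loop above
   with children A (marked), B (unmarked), C (marked), where die_child == C:
     pass 1: prev = C, step to A; A is marked, so recurse into A;
     pass 2: prev = A, step to B; B is unmarked, so unlink it
             (A->die_sib = C) and mark_removed (B), then recurse into C;
   the walk stops once c wraps back around to die_child (C).  */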
29229
29230 /* Remove dies representing declarations that we never use. */
29231
29232 static void
29233 prune_unused_types (void)
29234 {
29235 unsigned int i;
29236 limbo_die_node *node;
29237 comdat_type_node *ctnode;
29238 pubname_entry *pub;
29239 dw_die_ref base_type;
29240
29241 #if ENABLE_ASSERT_CHECKING
29242 /* All the marks should already be clear. */
29243 verify_marks_clear (comp_unit_die ());
29244 for (node = limbo_die_list; node; node = node->next)
29245 verify_marks_clear (node->die);
29246 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29247 verify_marks_clear (ctnode->root_die);
29248 #endif /* ENABLE_ASSERT_CHECKING */
29249
29250 /* Mark types that are used in global variables. */
29251 premark_types_used_by_global_vars ();
29252
29253 /* Set the mark on nodes that are actually used. */
29254 prune_unused_types_walk (comp_unit_die ());
29255 for (node = limbo_die_list; node; node = node->next)
29256 prune_unused_types_walk (node->die);
29257 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29258 {
29259 prune_unused_types_walk (ctnode->root_die);
29260 prune_unused_types_mark (ctnode->type_die, 1);
29261 }
29262
29263 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29264 are unusual in that they are pubnames that are the children of pubtypes.
29265 They should only be marked via their parent DW_TAG_enumeration_type die,
29266 not as roots in themselves. */
29267 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29268 if (pub->die->die_tag != DW_TAG_enumerator)
29269 prune_unused_types_mark (pub->die, 1);
29270 for (i = 0; base_types.iterate (i, &base_type); i++)
29271 prune_unused_types_mark (base_type, 1);
29272
29273 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
29274 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
29275 callees). */
29276 cgraph_node *cnode;
29277 FOR_EACH_FUNCTION (cnode)
29278 if (cnode->referred_to_p (false))
29279 {
29280 dw_die_ref die = lookup_decl_die (cnode->decl);
29281 if (die == NULL || die->die_mark)
29282 continue;
29283 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29284 if (e->caller != cnode
29285 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
29286 {
29287 prune_unused_types_mark (die, 1);
29288 break;
29289 }
29290 }
29291
29292 if (debug_str_hash)
29293 debug_str_hash->empty ();
29294 if (skeleton_debug_str_hash)
29295 skeleton_debug_str_hash->empty ();
29296 prune_unused_types_prune (comp_unit_die ());
29297 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29298 {
29299 node = *pnode;
29300 if (!node->die->die_mark)
29301 *pnode = node->next;
29302 else
29303 {
29304 prune_unused_types_prune (node->die);
29305 pnode = &node->next;
29306 }
29307 }
29308 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29309 prune_unused_types_prune (ctnode->root_die);
29310
29311 /* Leave the marks clear. */
29312 prune_unmark_dies (comp_unit_die ());
29313 for (node = limbo_die_list; node; node = node->next)
29314 prune_unmark_dies (node->die);
29315 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29316 prune_unmark_dies (ctnode->root_die);
29317 }
29318
29319 /* Helpers to manipulate hash table of comdat type units. */
29320
29321 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29322 {
29323 static inline hashval_t hash (const comdat_type_node *);
29324 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29325 };
29326
29327 inline hashval_t
29328 comdat_type_hasher::hash (const comdat_type_node *type_node)
29329 {
29330 hashval_t h;
29331 memcpy (&h, type_node->signature, sizeof (h));
29332 return h;
29333 }
29334
29335 inline bool
29336 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29337 const comdat_type_node *type_node_2)
29338 {
29339 return (! memcmp (type_node_1->signature, type_node_2->signature,
29340 DWARF_TYPE_SIGNATURE_SIZE));
29341 }
29342
29343 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
29344 to the location it would have been added at had we known its
29345 DECL_ASSEMBLER_NAME when the other attributes were added.  This will
29346 probably improve the compactness of the debug info by removing equivalent
29347 abbrevs, and hides any differences caused by deferring the
29348 computation of the assembler name, e.g. as triggered by PCH.  */
29349
29350 static inline void
29351 move_linkage_attr (dw_die_ref die)
29352 {
29353 unsigned ix = vec_safe_length (die->die_attr);
29354 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29355
29356 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29357 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29358
29359 while (--ix > 0)
29360 {
29361 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29362
29363 if (prev->dw_attr == DW_AT_decl_line
29364 || prev->dw_attr == DW_AT_decl_column
29365 || prev->dw_attr == DW_AT_name)
29366 break;
29367 }
29368
29369 if (ix != vec_safe_length (die->die_attr) - 1)
29370 {
29371 die->die_attr->pop ();
29372 die->die_attr->quick_insert (ix, linkage);
29373 }
29374 }
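/* Editor's note -- illustrative sketch, not part of the original source;
   the attribute order below is made up for the example.  Suppose the DIE's
   attributes are [DW_AT_name, DW_AT_decl_file, DW_AT_decl_line, DW_AT_type,
   DW_AT_external, DW_AT_linkage_name], with DW_AT_linkage_name just pushed
   last.  The backward scan stops at DW_AT_decl_line, so the linkage name is
   popped and re-inserted right after it:
     [DW_AT_name, DW_AT_decl_file, DW_AT_decl_line, DW_AT_linkage_name,
      DW_AT_type, DW_AT_external],
   i.e. where it would have been added had the assembler name been known up
   front.  */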
29375
29376 /* Helper function for resolve_addr: mark DW_TAG_base_type nodes
29377 referenced from typed stack ops and count how often they are used. */
29378
29379 static void
29380 mark_base_types (dw_loc_descr_ref loc)
29381 {
29382 dw_die_ref base_type = NULL;
29383
29384 for (; loc; loc = loc->dw_loc_next)
29385 {
29386 switch (loc->dw_loc_opc)
29387 {
29388 case DW_OP_regval_type:
29389 case DW_OP_deref_type:
29390 case DW_OP_GNU_regval_type:
29391 case DW_OP_GNU_deref_type:
29392 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29393 break;
29394 case DW_OP_convert:
29395 case DW_OP_reinterpret:
29396 case DW_OP_GNU_convert:
29397 case DW_OP_GNU_reinterpret:
29398 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29399 continue;
29400 /* FALLTHRU */
29401 case DW_OP_const_type:
29402 case DW_OP_GNU_const_type:
29403 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29404 break;
29405 case DW_OP_entry_value:
29406 case DW_OP_GNU_entry_value:
29407 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29408 continue;
29409 default:
29410 continue;
29411 }
29412 gcc_assert (base_type->die_parent == comp_unit_die ());
29413 if (base_type->die_mark)
29414 base_type->die_mark++;
29415 else
29416 {
29417 base_types.safe_push (base_type);
29418 base_type->die_mark = 1;
29419 }
29420 }
29421 }
29422
29423 /* Comparison function for sorting marked base types. */
29424
29425 static int
29426 base_type_cmp (const void *x, const void *y)
29427 {
29428 dw_die_ref dx = *(const dw_die_ref *) x;
29429 dw_die_ref dy = *(const dw_die_ref *) y;
29430 unsigned int byte_size1, byte_size2;
29431 unsigned int encoding1, encoding2;
29432 unsigned int align1, align2;
29433 if (dx->die_mark > dy->die_mark)
29434 return -1;
29435 if (dx->die_mark < dy->die_mark)
29436 return 1;
29437 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29438 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29439 if (byte_size1 < byte_size2)
29440 return 1;
29441 if (byte_size1 > byte_size2)
29442 return -1;
29443 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29444 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29445 if (encoding1 < encoding2)
29446 return 1;
29447 if (encoding1 > encoding2)
29448 return -1;
29449 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29450 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29451 if (align1 < align2)
29452 return 1;
29453 if (align1 > align2)
29454 return -1;
29455 return 0;
29456 }
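/* Editor's note -- illustrative sketch, not part of the original source;
   the counts and sizes are made up.  qsort with base_type_cmp orders base
   types by decreasing die_mark (usage count), breaking ties by decreasing
   DW_AT_byte_size, then DW_AT_encoding, then DW_AT_alignment.  E.g. an
   "int" DIE used 7 times sorts before a "double" DIE used 3 times, and of
   two DIEs each used 3 times the 8-byte one sorts first, so the most
   frequently referenced base types end up earliest in the CU and get the
   smallest DIE offsets for the uleb128 references in typed stack ops.  */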
29457
29458 /* Move base types marked by mark_base_types as early as possible
29459 in the CU, sorted by decreasing usage count both to make the
29460 uleb128 references as small as possible and to make sure they
29461 will have die_offset already computed by calc_die_sizes when
29462 the sizes of typed stack loc ops are computed.  */
29463
29464 static void
29465 move_marked_base_types (void)
29466 {
29467 unsigned int i;
29468 dw_die_ref base_type, die, c;
29469
29470 if (base_types.is_empty ())
29471 return;
29472
29473 /* Sort by decreasing usage count, they will be added again in that
29474 order later on. */
29475 base_types.qsort (base_type_cmp);
29476 die = comp_unit_die ();
29477 c = die->die_child;
29478 do
29479 {
29480 dw_die_ref prev = c;
29481 c = c->die_sib;
29482 while (c->die_mark)
29483 {
29484 remove_child_with_prev (c, prev);
29485 /* As base types got marked, there must be at least
29486 one node other than DW_TAG_base_type. */
29487 gcc_assert (die->die_child != NULL);
29488 c = prev->die_sib;
29489 }
29490 }
29491 while (c != die->die_child);
29492 gcc_assert (die->die_child);
29493 c = die->die_child;
29494 for (i = 0; base_types.iterate (i, &base_type); i++)
29495 {
29496 base_type->die_mark = 0;
29497 base_type->die_sib = c->die_sib;
29498 c->die_sib = base_type;
29499 c = base_type;
29500 }
29501 }
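/* Editor's note -- illustrative sketch, not part of the original source.
   Because DIE->die_child points to the last child and the sibling list is
   circular, inserting each sorted base type right after die_child splices
   it in at the front of the comp unit's child list: a CU whose children
   were [ns, f, int, double] (die_child == double) and whose marked base
   types sorted to [int, double] ends up as [int, double, ns, f], putting
   the heavily used base types at the smallest DIE offsets.  */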
29502
29503 /* Helper function for resolve_addr: attempt to resolve
29504 one CONST_STRING and return true if successful.  Similarly, verify that
29505 SYMBOL_REFs refer to variables emitted in the current CU.  */
29506
29507 static bool
29508 resolve_one_addr (rtx *addr)
29509 {
29510 rtx rtl = *addr;
29511
29512 if (GET_CODE (rtl) == CONST_STRING)
29513 {
29514 size_t len = strlen (XSTR (rtl, 0)) + 1;
29515 tree t = build_string (len, XSTR (rtl, 0));
29516 tree tlen = size_int (len - 1);
29517 TREE_TYPE (t)
29518 = build_array_type (char_type_node, build_index_type (tlen));
29519 rtl = lookup_constant_def (t);
29520 if (!rtl || !MEM_P (rtl))
29521 return false;
29522 rtl = XEXP (rtl, 0);
29523 if (GET_CODE (rtl) == SYMBOL_REF
29524 && SYMBOL_REF_DECL (rtl)
29525 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29526 return false;
29527 vec_safe_push (used_rtx_array, rtl);
29528 *addr = rtl;
29529 return true;
29530 }
29531
29532 if (GET_CODE (rtl) == SYMBOL_REF
29533 && SYMBOL_REF_DECL (rtl))
29534 {
29535 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29536 {
29537 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29538 return false;
29539 }
29540 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29541 return false;
29542 }
29543
29544 if (GET_CODE (rtl) == CONST)
29545 {
29546 subrtx_ptr_iterator::array_type array;
29547 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29548 if (!resolve_one_addr (*iter))
29549 return false;
29550 }
29551
29552 return true;
29553 }
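/* Editor's note -- illustrative sketch, not part of the original source.
   E.g. a CONST_STRING "abc" is resolved to the SYMBOL_REF of its constant
   pool entry only if that entry was actually written out (TREE_ASM_WRITTEN);
   a SYMBOL_REF for a decl this CU never emitted makes the whole expression
   unresolvable, so the caller drops the containing location (list) instead
   of emitting a dangling address.  */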
29554
29555 /* For STRING_CST T, return the SYMBOL_REF of its constant pool entry,
29556 if possible, and create a DW_TAG_dwarf_procedure that can be referenced
29557 from DW_OP_implicit_pointer if the string hasn't been seen yet.  */
29558
29559 static rtx
29560 string_cst_pool_decl (tree t)
29561 {
29562 rtx rtl = output_constant_def (t, 1);
29563 unsigned char *array;
29564 dw_loc_descr_ref l;
29565 tree decl;
29566 size_t len;
29567 dw_die_ref ref;
29568
29569 if (!rtl || !MEM_P (rtl))
29570 return NULL_RTX;
29571 rtl = XEXP (rtl, 0);
29572 if (GET_CODE (rtl) != SYMBOL_REF
29573 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29574 return NULL_RTX;
29575
29576 decl = SYMBOL_REF_DECL (rtl);
29577 if (!lookup_decl_die (decl))
29578 {
29579 len = TREE_STRING_LENGTH (t);
29580 vec_safe_push (used_rtx_array, rtl);
29581 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29582 array = ggc_vec_alloc<unsigned char> (len);
29583 memcpy (array, TREE_STRING_POINTER (t), len);
29584 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29585 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29586 l->dw_loc_oprnd2.v.val_vec.length = len;
29587 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29588 l->dw_loc_oprnd2.v.val_vec.array = array;
29589 add_AT_loc (ref, DW_AT_location, l);
29590 equate_decl_number_to_die (decl, ref);
29591 }
29592 return rtl;
29593 }
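/* Editor's note -- illustrative sketch, not part of the original source.
   For a pooled string literal such as "abc" (TREE_STRING_LENGTH == 4,
   counting the terminating NUL), the first call above creates

     DW_TAG_dwarf_procedure
       DW_AT_location: DW_OP_implicit_value 4, { 'a', 'b', 'c', '\0' }

   under the comp unit DIE and associates it with the pool decl, so a later
   DW_OP_implicit_pointer can refer to the string even though it has no
   resolvable address of its own.  */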
29594
29595 /* Helper function of resolve_addr_in_expr. LOC is
29596 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29597 of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29598 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
29599 with DW_OP_implicit_pointer if possible
29600 and return true; if unsuccessful, return false. */
29601
29602 static bool
29603 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29604 {
29605 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29606 HOST_WIDE_INT offset = 0;
29607 dw_die_ref ref = NULL;
29608 tree decl;
29609
29610 if (GET_CODE (rtl) == CONST
29611 && GET_CODE (XEXP (rtl, 0)) == PLUS
29612 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29613 {
29614 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29615 rtl = XEXP (XEXP (rtl, 0), 0);
29616 }
29617 if (GET_CODE (rtl) == CONST_STRING)
29618 {
29619 size_t len = strlen (XSTR (rtl, 0)) + 1;
29620 tree t = build_string (len, XSTR (rtl, 0));
29621 tree tlen = size_int (len - 1);
29622
29623 TREE_TYPE (t)
29624 = build_array_type (char_type_node, build_index_type (tlen));
29625 rtl = string_cst_pool_decl (t);
29626 if (!rtl)
29627 return false;
29628 }
29629 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29630 {
29631 decl = SYMBOL_REF_DECL (rtl);
29632 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29633 {
29634 ref = lookup_decl_die (decl);
29635 if (ref && (get_AT (ref, DW_AT_location)
29636 || get_AT (ref, DW_AT_const_value)))
29637 {
29638 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29639 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29640 loc->dw_loc_oprnd1.val_entry = NULL;
29641 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29642 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29643 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29644 loc->dw_loc_oprnd2.v.val_int = offset;
29645 return true;
29646 }
29647 }
29648 }
29649 return false;
29650 }
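/* Editor's note -- illustrative sketch, not part of the original source;
   "v" is a hypothetical variable name.  If a location expression was

     DW_OP_addr <v>, DW_OP_stack_value

   but v's address cannot be resolved, while v's DIE still carries
   DW_AT_location or DW_AT_const_value, the pair is rewritten in place as

     DW_OP_implicit_pointer <DIE of v> <offset>

   where <offset> is any constant displacement folded out of a
   (const (plus (symbol_ref v) (const_int N))) address.  */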
29651
29652 /* Helper function for resolve_addr: handle one location
29653 expression and return false if at least one CONST_STRING or SYMBOL_REF in
29654 it couldn't be resolved.  */
29655
29656 static bool
29657 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29658 {
29659 dw_loc_descr_ref keep = NULL;
29660 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29661 switch (loc->dw_loc_opc)
29662 {
29663 case DW_OP_addr:
29664 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29665 {
29666 if ((prev == NULL
29667 || prev->dw_loc_opc == DW_OP_piece
29668 || prev->dw_loc_opc == DW_OP_bit_piece)
29669 && loc->dw_loc_next
29670 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29671 && (!dwarf_strict || dwarf_version >= 5)
29672 && optimize_one_addr_into_implicit_ptr (loc))
29673 break;
29674 return false;
29675 }
29676 break;
29677 case DW_OP_GNU_addr_index:
29678 case DW_OP_GNU_const_index:
29679 if (loc->dw_loc_opc == DW_OP_GNU_addr_index
29680 || (loc->dw_loc_opc == DW_OP_GNU_const_index && loc->dtprel))
29681 {
29682 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29683 if (!resolve_one_addr (&rtl))
29684 return false;
29685 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29686 loc->dw_loc_oprnd1.val_entry
29687 = add_addr_table_entry (rtl, ate_kind_rtx);
29688 }
29689 break;
29690 case DW_OP_const4u:
29691 case DW_OP_const8u:
29692 if (loc->dtprel
29693 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29694 return false;
29695 break;
29696 case DW_OP_plus_uconst:
29697 if (size_of_loc_descr (loc)
29698 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
29699 + 1
29700 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
29701 {
29702 dw_loc_descr_ref repl
29703 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
29704 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
29705 add_loc_descr (&repl, loc->dw_loc_next);
29706 *loc = *repl;
29707 }
29708 break;
29709 case DW_OP_implicit_value:
29710 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
29711 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
29712 return false;
29713 break;
29714 case DW_OP_implicit_pointer:
29715 case DW_OP_GNU_implicit_pointer:
29716 case DW_OP_GNU_parameter_ref:
29717 case DW_OP_GNU_variable_value:
29718 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29719 {
29720 dw_die_ref ref
29721 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29722 if (ref == NULL)
29723 return false;
29724 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29725 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29726 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29727 }
29728 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
29729 {
29730 if (prev == NULL
29731 && loc->dw_loc_next == NULL
29732 && AT_class (a) == dw_val_class_loc)
29733 switch (a->dw_attr)
29734 {
29735 /* The following attributes allow both exprloc and reference,
29736 so if the whole expression is DW_OP_GNU_variable_value
29737 alone we can transform it into a reference.  */
29738 case DW_AT_byte_size:
29739 case DW_AT_bit_size:
29740 case DW_AT_lower_bound:
29741 case DW_AT_upper_bound:
29742 case DW_AT_bit_stride:
29743 case DW_AT_count:
29744 case DW_AT_allocated:
29745 case DW_AT_associated:
29746 case DW_AT_byte_stride:
29747 a->dw_attr_val.val_class = dw_val_class_die_ref;
29748 a->dw_attr_val.val_entry = NULL;
29749 a->dw_attr_val.v.val_die_ref.die
29750 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29751 a->dw_attr_val.v.val_die_ref.external = 0;
29752 return true;
29753 default:
29754 break;
29755 }
29756 if (dwarf_strict)
29757 return false;
29758 }
29759 break;
29760 case DW_OP_const_type:
29761 case DW_OP_regval_type:
29762 case DW_OP_deref_type:
29763 case DW_OP_convert:
29764 case DW_OP_reinterpret:
29765 case DW_OP_GNU_const_type:
29766 case DW_OP_GNU_regval_type:
29767 case DW_OP_GNU_deref_type:
29768 case DW_OP_GNU_convert:
29769 case DW_OP_GNU_reinterpret:
29770 while (loc->dw_loc_next
29771 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
29772 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
29773 {
29774 dw_die_ref base1, base2;
29775 unsigned enc1, enc2, size1, size2;
29776 if (loc->dw_loc_opc == DW_OP_regval_type
29777 || loc->dw_loc_opc == DW_OP_deref_type
29778 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29779 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29780 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
29781 else if (loc->dw_loc_oprnd1.val_class
29782 == dw_val_class_unsigned_const)
29783 break;
29784 else
29785 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29786 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
29787 == dw_val_class_unsigned_const)
29788 break;
29789 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
29790 gcc_assert (base1->die_tag == DW_TAG_base_type
29791 && base2->die_tag == DW_TAG_base_type);
29792 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
29793 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
29794 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
29795 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
29796 if (size1 == size2
29797 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
29798 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
29799 && loc != keep)
29800 || enc1 == enc2))
29801 {
29802 /* Optimize away next DW_OP_convert after
29803 adjusting LOC's base type die reference. */
29804 if (loc->dw_loc_opc == DW_OP_regval_type
29805 || loc->dw_loc_opc == DW_OP_deref_type
29806 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29807 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29808 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
29809 else
29810 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
29811 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29812 continue;
29813 }
29814 /* Don't change integer DW_OP_convert after e.g. floating
29815 point typed stack entry. */
29816 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
29817 keep = loc->dw_loc_next;
29818 break;
29819 }
29820 break;
29821 default:
29822 break;
29823 }
29824 return true;
29825 }
29826
29827 /* Helper function of resolve_addr.  DIE had a DW_AT_location of
29828 DW_OP_addr alone, which referred to DECL in DW_OP_addr's operand,
29829 and DW_OP_addr couldn't be resolved.  resolve_addr has already
29830 removed the DW_AT_location attribute.  This function attempts to
29831 add to DIE a new DW_AT_location attribute containing DW_OP_implicit_pointer,
29832 or a DW_AT_const_value attribute, if possible.  */
29833
29834 static void
29835 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
29836 {
29837 if (!VAR_P (decl)
29838 || lookup_decl_die (decl) != die
29839 || DECL_EXTERNAL (decl)
29840 || !TREE_STATIC (decl)
29841 || DECL_INITIAL (decl) == NULL_TREE
29842 || DECL_P (DECL_INITIAL (decl))
29843 || get_AT (die, DW_AT_const_value))
29844 return;
29845
29846 tree init = DECL_INITIAL (decl);
29847 HOST_WIDE_INT offset = 0;
29848 /* For variables that have been optimized away and thus
29849 don't have a memory location, see if we can emit
29850 DW_AT_const_value instead. */
29851 if (tree_add_const_value_attribute (die, init))
29852 return;
29853 if (dwarf_strict && dwarf_version < 5)
29854 return;
29855 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
29856 and ADDR_EXPR refers to a decl that has DW_AT_location or
29857 DW_AT_const_value (but isn't addressable, otherwise
29858 resolving the original DW_OP_addr wouldn't fail), see if
29859 we can add DW_OP_implicit_pointer. */
29860 STRIP_NOPS (init);
29861 if (TREE_CODE (init) == POINTER_PLUS_EXPR
29862 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
29863 {
29864 offset = tree_to_shwi (TREE_OPERAND (init, 1));
29865 init = TREE_OPERAND (init, 0);
29866 STRIP_NOPS (init);
29867 }
29868 if (TREE_CODE (init) != ADDR_EXPR)
29869 return;
29870 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
29871 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
29872 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
29873 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
29874 && TREE_OPERAND (init, 0) != decl))
29875 {
29876 dw_die_ref ref;
29877 dw_loc_descr_ref l;
29878
29879 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
29880 {
29881 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
29882 if (!rtl)
29883 return;
29884 decl = SYMBOL_REF_DECL (rtl);
29885 }
29886 else
29887 decl = TREE_OPERAND (init, 0);
29888 ref = lookup_decl_die (decl);
29889 if (ref == NULL
29890 || (!get_AT (ref, DW_AT_location)
29891 && !get_AT (ref, DW_AT_const_value)))
29892 return;
29893 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
29894 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29895 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
29896 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
29897 add_AT_loc (die, DW_AT_location, l);
29898 }
29899 }
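/* Editor's note -- illustrative sketch, not part of the original source;
   the variable names are hypothetical.  Consider

     static int q = 42;        // optimized away, no memory location
     static int *p = &q;       // hence an unresolvable DW_OP_addr

   After p's DW_AT_location has been dropped, the code above first tries
   DW_AT_const_value; failing that, since DECL_INITIAL (p) is an ADDR_EXPR
   of q and q's DIE still has DW_AT_location or DW_AT_const_value, p gets a
   new DW_AT_location of

     DW_OP_implicit_pointer <DIE of q> 0

   with a nonzero offset when the initializer is a POINTER_PLUS_EXPR of the
   ADDR_EXPR.  */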
29900
29901 /* Return NULL if L is a valid DWARF expression; otherwise return the
29902 first op that is not valid in a DWARF expression.  */
29903
29904 static dw_loc_descr_ref
29905 non_dwarf_expression (dw_loc_descr_ref l)
29906 {
29907 while (l)
29908 {
29909 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
29910 return l;
29911 switch (l->dw_loc_opc)
29912 {
29913 case DW_OP_regx:
29914 case DW_OP_implicit_value:
29915 case DW_OP_stack_value:
29916 case DW_OP_implicit_pointer:
29917 case DW_OP_GNU_implicit_pointer:
29918 case DW_OP_GNU_parameter_ref:
29919 case DW_OP_piece:
29920 case DW_OP_bit_piece:
29921 return l;
29922 default:
29923 break;
29924 }
29925 l = l->dw_loc_next;
29926 }
29927 return NULL;
29928 }
29929
29930 /* Return an adjusted copy of EXPR:
29931 If it is an empty DWARF expression, return it.
29932 If it is a valid non-empty DWARF expression,
29933 return a copy of EXPR with DW_OP_deref appended to it.
29934 If it is a DWARF expression followed by DW_OP_reg{N,x}, return a
29935 copy of the DWARF expression with DW_OP_breg{N,x} <0> appended instead.
29936 If it is a DWARF expression followed by DW_OP_stack_value, return a
29937 copy of the DWARF expression with nothing appended.
29938 Otherwise, return NULL.  */
29939
29940 static dw_loc_descr_ref
29941 copy_deref_exprloc (dw_loc_descr_ref expr)
29942 {
29943 dw_loc_descr_ref tail = NULL;
29944
29945 if (expr == NULL)
29946 return NULL;
29947
29948 dw_loc_descr_ref l = non_dwarf_expression (expr);
29949 if (l && l->dw_loc_next)
29950 return NULL;
29951
29952 if (l)
29953 {
29954 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
29955 tail = new_loc_descr ((enum dwarf_location_atom)
29956 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
29957 0, 0);
29958 else
29959 switch (l->dw_loc_opc)
29960 {
29961 case DW_OP_regx:
29962 tail = new_loc_descr (DW_OP_bregx,
29963 l->dw_loc_oprnd1.v.val_unsigned, 0);
29964 break;
29965 case DW_OP_stack_value:
29966 break;
29967 default:
29968 return NULL;
29969 }
29970 }
29971 else
29972 tail = new_loc_descr (DW_OP_deref, 0, 0);
29973
29974 dw_loc_descr_ref ret = NULL, *p = &ret;
29975 while (expr != l)
29976 {
29977 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
29978 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
29979 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
29980 p = &(*p)->dw_loc_next;
29981 expr = expr->dw_loc_next;
29982 }
29983 *p = tail;
29984 return ret;
29985 }
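/* Editor's note -- illustrative sketch, not part of the original source.
   Example rewrites performed by copy_deref_exprloc:

     DW_OP_fbreg -16                    -> DW_OP_fbreg -16, DW_OP_deref
     DW_OP_breg5 8, DW_OP_reg3          -> DW_OP_breg5 8, DW_OP_breg3 0
     DW_OP_regx 42                      -> DW_OP_bregx 42 0
     DW_OP_breg5 8, DW_OP_stack_value   -> DW_OP_breg5 8
     DW_OP_breg5 8, DW_OP_piece 4       -> NULL (not a plain DWARF expression)

   i.e. a memory location gets an extra dereference, a register location is
   turned into a load from that register, a stack-value location already is
   the value itself, and anything more complicated is given up on.  */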
29986
29987 /* For a DW_AT_string_length attribute whose value is a DW_OP_GNU_variable_value
29988 reference to a variable or argument, adjust it if needed and return:
29989 -1 if the DW_AT_string_length attribute and the DW_AT_{string_length_,}byte_size
29990 attribute (if present) should be removed;
29991 0 if the attribute should be kept, perhaps with minor modifications, and no rescan is needed;
29992 1 if the attribute has been successfully adjusted.  */
29993
29994 static int
29995 optimize_string_length (dw_attr_node *a)
29996 {
29997 dw_loc_descr_ref l = AT_loc (a), lv;
29998 dw_die_ref die;
29999 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30000 {
30001 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30002 die = lookup_decl_die (decl);
30003 if (die)
30004 {
30005 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30006 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30007 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30008 }
30009 else
30010 return -1;
30011 }
30012 else
30013 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30014
30015 /* DWARF5 allows reference class, so we can then reference the DIE.
30016 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30017 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30018 {
30019 a->dw_attr_val.val_class = dw_val_class_die_ref;
30020 a->dw_attr_val.val_entry = NULL;
30021 a->dw_attr_val.v.val_die_ref.die = die;
30022 a->dw_attr_val.v.val_die_ref.external = 0;
30023 return 0;
30024 }
30025
30026 dw_attr_node *av = get_AT (die, DW_AT_location);
30027 dw_loc_list_ref d;
30028 bool non_dwarf_expr = false;
30029
30030 if (av == NULL)
30031 return dwarf_strict ? -1 : 0;
30032 switch (AT_class (av))
30033 {
30034 case dw_val_class_loc_list:
30035 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30036 if (d->expr && non_dwarf_expression (d->expr))
30037 non_dwarf_expr = true;
30038 break;
30039 case dw_val_class_view_list:
30040 gcc_unreachable ();
30041 case dw_val_class_loc:
30042 lv = AT_loc (av);
30043 if (lv == NULL)
30044 return dwarf_strict ? -1 : 0;
30045 if (non_dwarf_expression (lv))
30046 non_dwarf_expr = true;
30047 break;
30048 default:
30049 return dwarf_strict ? -1 : 0;
30050 }
30051
30052 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30053 into DW_OP_call4 or DW_OP_GNU_variable_value into
30054 DW_OP_call4 DW_OP_deref, do so. */
30055 if (!non_dwarf_expr
30056 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30057 {
30058 l->dw_loc_opc = DW_OP_call4;
30059 if (l->dw_loc_next)
30060 l->dw_loc_next = NULL;
30061 else
30062 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30063 return 0;
30064 }
30065
30066 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30067 copy over the DW_AT_location attribute from die to a. */
30068 if (l->dw_loc_next != NULL)
30069 {
30070 a->dw_attr_val = av->dw_attr_val;
30071 return 1;
30072 }
30073
30074 dw_loc_list_ref list, *p;
30075 switch (AT_class (av))
30076 {
30077 case dw_val_class_loc_list:
30078 p = &list;
30079 list = NULL;
30080 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30081 {
30082 lv = copy_deref_exprloc (d->expr);
30083 if (lv)
30084 {
30085 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30086 p = &(*p)->dw_loc_next;
30087 }
30088 else if (!dwarf_strict && d->expr)
30089 return 0;
30090 }
30091 if (list == NULL)
30092 return dwarf_strict ? -1 : 0;
30093 a->dw_attr_val.val_class = dw_val_class_loc_list;
30094 gen_llsym (list);
30095 *AT_loc_list_ptr (a) = list;
30096 return 1;
30097 case dw_val_class_loc:
30098 lv = copy_deref_exprloc (AT_loc (av));
30099 if (lv == NULL)
30100 return dwarf_strict ? -1 : 0;
30101 a->dw_attr_val.v.val_loc = lv;
30102 return 1;
30103 default:
30104 gcc_unreachable ();
30105 }
30106 }
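/* Editor's note -- illustrative sketch, not part of the original source;
   "len" names a hypothetical artificial variable holding a string length.
   If DW_AT_string_length is

     DW_OP_GNU_variable_value <len>, DW_OP_stack_value

   then for DWARF 5 the whole attribute can simply become a reference-class
   value pointing at len's DIE.  Otherwise, when len's own location is an
   ordinary DWARF expression, the pair is rewritten as DW_OP_call4 <DIE of
   len>, and a lone DW_OP_GNU_variable_value <len> becomes DW_OP_call4
   <DIE of len>, DW_OP_deref.  Failing that, the attribute is either copied
   from len's DW_AT_location (with a dereference added by copy_deref_exprloc
   for the lone form) or, under -gstrict-dwarf, removed together with the
   byte size attributes (return value -1).  */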
30107
30108 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30109 an address in the .rodata section if the string literal is emitted there,
30110 or, if it isn't found in .rodata, remove the containing location list or
30111 replace DW_AT_const_value with DW_AT_location and an empty location
30112 expression.  Similarly for SYMBOL_REFs: keep only those that refer
30113 to something that has been emitted in the current CU.  */
30114
30115 static void
30116 resolve_addr (dw_die_ref die)
30117 {
30118 dw_die_ref c;
30119 dw_attr_node *a;
30120 dw_loc_list_ref *curr, *start, loc;
30121 unsigned ix;
30122 bool remove_AT_byte_size = false;
30123
30124 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30125 switch (AT_class (a))
30126 {
30127 case dw_val_class_loc_list:
30128 start = curr = AT_loc_list_ptr (a);
30129 loc = *curr;
30130 gcc_assert (loc);
30131 /* The same list can be referenced more than once. See if we have
30132 already recorded the result from a previous pass. */
30133 if (loc->replaced)
30134 *curr = loc->dw_loc_next;
30135 else if (!loc->resolved_addr)
30136 {
30137 /* As things stand, we do not expect or allow one die to
30138 reference a suffix of another die's location list chain.
30139 References must be identical or completely separate.
30140 There is therefore no need to cache the result of this
30141 pass on any list other than the first; doing so
30142 would lead to unnecessary writes. */
30143 while (*curr)
30144 {
30145 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30146 if (!resolve_addr_in_expr (a, (*curr)->expr))
30147 {
30148 dw_loc_list_ref next = (*curr)->dw_loc_next;
30149 dw_loc_descr_ref l = (*curr)->expr;
30150
30151 if (next && (*curr)->ll_symbol)
30152 {
30153 gcc_assert (!next->ll_symbol);
30154 next->ll_symbol = (*curr)->ll_symbol;
30155 next->vl_symbol = (*curr)->vl_symbol;
30156 }
30157 if (dwarf_split_debug_info)
30158 remove_loc_list_addr_table_entries (l);
30159 *curr = next;
30160 }
30161 else
30162 {
30163 mark_base_types ((*curr)->expr);
30164 curr = &(*curr)->dw_loc_next;
30165 }
30166 }
30167 if (loc == *start)
30168 loc->resolved_addr = 1;
30169 else
30170 {
30171 loc->replaced = 1;
30172 loc->dw_loc_next = *start;
30173 }
30174 }
30175 if (!*start)
30176 {
30177 remove_AT (die, a->dw_attr);
30178 ix--;
30179 }
30180 break;
30181 case dw_val_class_view_list:
30182 {
30183 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30184 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30185 dw_val_node *llnode
30186 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30187 /* If we no longer have a loclist, or it no longer needs
30188 views, drop this attribute. */
30189 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30190 {
30191 remove_AT (die, a->dw_attr);
30192 ix--;
30193 }
30194 break;
30195 }
30196 case dw_val_class_loc:
30197 {
30198 dw_loc_descr_ref l = AT_loc (a);
30199 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30200 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30201 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30202 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30203 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30204 with DW_FORM_ref referencing the same DIE as
30205 DW_OP_GNU_variable_value used to reference. */
30206 if (a->dw_attr == DW_AT_string_length
30207 && l
30208 && l->dw_loc_opc == DW_OP_GNU_variable_value
30209 && (l->dw_loc_next == NULL
30210 || (l->dw_loc_next->dw_loc_next == NULL
30211 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30212 {
30213 switch (optimize_string_length (a))
30214 {
30215 case -1:
30216 remove_AT (die, a->dw_attr);
30217 ix--;
30218 /* If we drop DW_AT_string_length, we need to drop also
30219 DW_AT_{string_length_,}byte_size. */
30220 remove_AT_byte_size = true;
30221 continue;
30222 default:
30223 break;
30224 case 1:
30225 /* Even if we keep the optimized DW_AT_string_length,
30226 it might have changed AT_class, so process it again. */
30227 ix--;
30228 continue;
30229 }
30230 }
30231 /* For -gdwarf-2 don't attempt to optimize
30232 DW_AT_data_member_location containing
30233 DW_OP_plus_uconst - older consumers might
30234 rely on it being that op instead of a more complex,
30235 but shorter, location description. */
30236 if ((dwarf_version > 2
30237 || a->dw_attr != DW_AT_data_member_location
30238 || l == NULL
30239 || l->dw_loc_opc != DW_OP_plus_uconst
30240 || l->dw_loc_next != NULL)
30241 && !resolve_addr_in_expr (a, l))
30242 {
30243 if (dwarf_split_debug_info)
30244 remove_loc_list_addr_table_entries (l);
30245 if (l != NULL
30246 && l->dw_loc_next == NULL
30247 && l->dw_loc_opc == DW_OP_addr
30248 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30249 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30250 && a->dw_attr == DW_AT_location)
30251 {
30252 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30253 remove_AT (die, a->dw_attr);
30254 ix--;
30255 optimize_location_into_implicit_ptr (die, decl);
30256 break;
30257 }
30258 if (a->dw_attr == DW_AT_string_length)
30259 /* If we drop DW_AT_string_length, we need to drop also
30260 DW_AT_{string_length_,}byte_size. */
30261 remove_AT_byte_size = true;
30262 remove_AT (die, a->dw_attr);
30263 ix--;
30264 }
30265 else
30266 mark_base_types (l);
30267 }
30268 break;
30269 case dw_val_class_addr:
30270 if (a->dw_attr == DW_AT_const_value
30271 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30272 {
30273 if (AT_index (a) != NOT_INDEXED)
30274 remove_addr_table_entry (a->dw_attr_val.val_entry);
30275 remove_AT (die, a->dw_attr);
30276 ix--;
30277 }
30278 if ((die->die_tag == DW_TAG_call_site
30279 && a->dw_attr == DW_AT_call_origin)
30280 || (die->die_tag == DW_TAG_GNU_call_site
30281 && a->dw_attr == DW_AT_abstract_origin))
30282 {
30283 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30284 dw_die_ref tdie = lookup_decl_die (tdecl);
30285 dw_die_ref cdie;
30286 if (tdie == NULL
30287 && DECL_EXTERNAL (tdecl)
30288 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30289 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30290 {
30291 dw_die_ref pdie = cdie;
30292 /* Make sure we don't add these DIEs into type units.
30293 We could emit skeleton DIEs for context (namespaces,
30294 outer structs/classes) and a skeleton DIE for the
30295 innermost context with DW_AT_signature pointing to the
30296 type unit. See PR78835. */
30297 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30298 pdie = pdie->die_parent;
30299 if (pdie == NULL)
30300 {
30301 /* Creating a full DIE for tdecl is overly expensive and,
30302 when in the LTO phase, at this point even wrong,
30303 as it can end up generating new type DIEs we didn't
30304 output, and optimize_external_refs would then crash.  */
30305 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30306 add_AT_flag (tdie, DW_AT_external, 1);
30307 add_AT_flag (tdie, DW_AT_declaration, 1);
30308 add_linkage_attr (tdie, tdecl);
30309 add_name_and_src_coords_attributes (tdie, tdecl, true);
30310 equate_decl_number_to_die (tdecl, tdie);
30311 }
30312 }
30313 if (tdie)
30314 {
30315 a->dw_attr_val.val_class = dw_val_class_die_ref;
30316 a->dw_attr_val.v.val_die_ref.die = tdie;
30317 a->dw_attr_val.v.val_die_ref.external = 0;
30318 }
30319 else
30320 {
30321 if (AT_index (a) != NOT_INDEXED)
30322 remove_addr_table_entry (a->dw_attr_val.val_entry);
30323 remove_AT (die, a->dw_attr);
30324 ix--;
30325 }
30326 }
30327 break;
30328 default:
30329 break;
30330 }
30331
30332 if (remove_AT_byte_size)
30333 remove_AT (die, dwarf_version >= 5
30334 ? DW_AT_string_length_byte_size
30335 : DW_AT_byte_size);
30336
30337 FOR_EACH_CHILD (die, c, resolve_addr (c));
30338 }
30339 \f
30340 /* Helper routines for optimize_location_lists.
30341 This pass tries to share identical location lists in the .debug_loc
30342 section.  */
30343
30344 /* Iteratively hash operands of LOC opcode into HSTATE. */
30345
30346 static void
30347 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30348 {
30349 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30350 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30351
30352 switch (loc->dw_loc_opc)
30353 {
30354 case DW_OP_const4u:
30355 case DW_OP_const8u:
30356 if (loc->dtprel)
30357 goto hash_addr;
30358 /* FALLTHRU */
30359 case DW_OP_const1u:
30360 case DW_OP_const1s:
30361 case DW_OP_const2u:
30362 case DW_OP_const2s:
30363 case DW_OP_const4s:
30364 case DW_OP_const8s:
30365 case DW_OP_constu:
30366 case DW_OP_consts:
30367 case DW_OP_pick:
30368 case DW_OP_plus_uconst:
30369 case DW_OP_breg0:
30370 case DW_OP_breg1:
30371 case DW_OP_breg2:
30372 case DW_OP_breg3:
30373 case DW_OP_breg4:
30374 case DW_OP_breg5:
30375 case DW_OP_breg6:
30376 case DW_OP_breg7:
30377 case DW_OP_breg8:
30378 case DW_OP_breg9:
30379 case DW_OP_breg10:
30380 case DW_OP_breg11:
30381 case DW_OP_breg12:
30382 case DW_OP_breg13:
30383 case DW_OP_breg14:
30384 case DW_OP_breg15:
30385 case DW_OP_breg16:
30386 case DW_OP_breg17:
30387 case DW_OP_breg18:
30388 case DW_OP_breg19:
30389 case DW_OP_breg20:
30390 case DW_OP_breg21:
30391 case DW_OP_breg22:
30392 case DW_OP_breg23:
30393 case DW_OP_breg24:
30394 case DW_OP_breg25:
30395 case DW_OP_breg26:
30396 case DW_OP_breg27:
30397 case DW_OP_breg28:
30398 case DW_OP_breg29:
30399 case DW_OP_breg30:
30400 case DW_OP_breg31:
30401 case DW_OP_regx:
30402 case DW_OP_fbreg:
30403 case DW_OP_piece:
30404 case DW_OP_deref_size:
30405 case DW_OP_xderef_size:
30406 hstate.add_object (val1->v.val_int);
30407 break;
30408 case DW_OP_skip:
30409 case DW_OP_bra:
30410 {
30411 int offset;
30412
30413 gcc_assert (val1->val_class == dw_val_class_loc);
30414 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30415 hstate.add_object (offset);
30416 }
30417 break;
30418 case DW_OP_implicit_value:
30419 hstate.add_object (val1->v.val_unsigned);
30420 switch (val2->val_class)
30421 {
30422 case dw_val_class_const:
30423 hstate.add_object (val2->v.val_int);
30424 break;
30425 case dw_val_class_vec:
30426 {
30427 unsigned int elt_size = val2->v.val_vec.elt_size;
30428 unsigned int len = val2->v.val_vec.length;
30429
30430 hstate.add_int (elt_size);
30431 hstate.add_int (len);
30432 hstate.add (val2->v.val_vec.array, len * elt_size);
30433 }
30434 break;
30435 case dw_val_class_const_double:
30436 hstate.add_object (val2->v.val_double.low);
30437 hstate.add_object (val2->v.val_double.high);
30438 break;
30439 case dw_val_class_wide_int:
30440 hstate.add (val2->v.val_wide->get_val (),
30441 get_full_len (*val2->v.val_wide)
30442 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30443 break;
30444 case dw_val_class_addr:
30445 inchash::add_rtx (val2->v.val_addr, hstate);
30446 break;
30447 default:
30448 gcc_unreachable ();
30449 }
30450 break;
30451 case DW_OP_bregx:
30452 case DW_OP_bit_piece:
30453 hstate.add_object (val1->v.val_int);
30454 hstate.add_object (val2->v.val_int);
30455 break;
30456 case DW_OP_addr:
30457 hash_addr:
30458 if (loc->dtprel)
30459 {
30460 unsigned char dtprel = 0xd1;
30461 hstate.add_object (dtprel);
30462 }
30463 inchash::add_rtx (val1->v.val_addr, hstate);
30464 break;
30465 case DW_OP_GNU_addr_index:
30466 case DW_OP_GNU_const_index:
30467 {
30468 if (loc->dtprel)
30469 {
30470 unsigned char dtprel = 0xd1;
30471 hstate.add_object (dtprel);
30472 }
30473 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30474 }
30475 break;
30476 case DW_OP_implicit_pointer:
30477 case DW_OP_GNU_implicit_pointer:
30478 hstate.add_int (val2->v.val_int);
30479 break;
30480 case DW_OP_entry_value:
30481 case DW_OP_GNU_entry_value:
30482 hstate.add_object (val1->v.val_loc);
30483 break;
30484 case DW_OP_regval_type:
30485 case DW_OP_deref_type:
30486 case DW_OP_GNU_regval_type:
30487 case DW_OP_GNU_deref_type:
30488 {
30489 unsigned int byte_size
30490 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30491 unsigned int encoding
30492 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30493 hstate.add_object (val1->v.val_int);
30494 hstate.add_object (byte_size);
30495 hstate.add_object (encoding);
30496 }
30497 break;
30498 case DW_OP_convert:
30499 case DW_OP_reinterpret:
30500 case DW_OP_GNU_convert:
30501 case DW_OP_GNU_reinterpret:
30502 if (val1->val_class == dw_val_class_unsigned_const)
30503 {
30504 hstate.add_object (val1->v.val_unsigned);
30505 break;
30506 }
30507 /* FALLTHRU */
30508 case DW_OP_const_type:
30509 case DW_OP_GNU_const_type:
30510 {
30511 unsigned int byte_size
30512 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30513 unsigned int encoding
30514 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30515 hstate.add_object (byte_size);
30516 hstate.add_object (encoding);
30517 if (loc->dw_loc_opc != DW_OP_const_type
30518 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30519 break;
30520 hstate.add_object (val2->val_class);
30521 switch (val2->val_class)
30522 {
30523 case dw_val_class_const:
30524 hstate.add_object (val2->v.val_int);
30525 break;
30526 case dw_val_class_vec:
30527 {
30528 unsigned int elt_size = val2->v.val_vec.elt_size;
30529 unsigned int len = val2->v.val_vec.length;
30530
30531 hstate.add_object (elt_size);
30532 hstate.add_object (len);
30533 hstate.add (val2->v.val_vec.array, len * elt_size);
30534 }
30535 break;
30536 case dw_val_class_const_double:
30537 hstate.add_object (val2->v.val_double.low);
30538 hstate.add_object (val2->v.val_double.high);
30539 break;
30540 case dw_val_class_wide_int:
30541 hstate.add (val2->v.val_wide->get_val (),
30542 get_full_len (*val2->v.val_wide)
30543 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30544 break;
30545 default:
30546 gcc_unreachable ();
30547 }
30548 }
30549 break;
30550
30551 default:
30552 /* Other codes have no operands. */
30553 break;
30554 }
30555 }
30556
30557 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30558
30559 static inline void
30560 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30561 {
30562 dw_loc_descr_ref l;
30563 bool sizes_computed = false;
30564 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30565 size_of_locs (loc);
30566
30567 for (l = loc; l != NULL; l = l->dw_loc_next)
30568 {
30569 enum dwarf_location_atom opc = l->dw_loc_opc;
30570 hstate.add_object (opc);
30571 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30572 {
30573 size_of_locs (loc);
30574 sizes_computed = true;
30575 }
30576 hash_loc_operands (l, hstate);
30577 }
30578 }
30579
30580 /* Compute hash of the whole location list LIST_HEAD. */
30581
30582 static inline void
30583 hash_loc_list (dw_loc_list_ref list_head)
30584 {
30585 dw_loc_list_ref curr = list_head;
30586 inchash::hash hstate;
30587
30588 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30589 {
30590 hstate.add (curr->begin, strlen (curr->begin) + 1);
30591 hstate.add (curr->end, strlen (curr->end) + 1);
30592 hstate.add_object (curr->vbegin);
30593 hstate.add_object (curr->vend);
30594 if (curr->section)
30595 hstate.add (curr->section, strlen (curr->section) + 1);
30596 hash_locs (curr->expr, hstate);
30597 }
30598 list_head->hash = hstate.end ();
30599 }
30600
30601 /* Return true if X and Y opcodes have the same operands. */
30602
30603 static inline bool
30604 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30605 {
30606 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30607 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30608 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30609 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30610
30611 switch (x->dw_loc_opc)
30612 {
30613 case DW_OP_const4u:
30614 case DW_OP_const8u:
30615 if (x->dtprel)
30616 goto hash_addr;
30617 /* FALLTHRU */
30618 case DW_OP_const1u:
30619 case DW_OP_const1s:
30620 case DW_OP_const2u:
30621 case DW_OP_const2s:
30622 case DW_OP_const4s:
30623 case DW_OP_const8s:
30624 case DW_OP_constu:
30625 case DW_OP_consts:
30626 case DW_OP_pick:
30627 case DW_OP_plus_uconst:
30628 case DW_OP_breg0:
30629 case DW_OP_breg1:
30630 case DW_OP_breg2:
30631 case DW_OP_breg3:
30632 case DW_OP_breg4:
30633 case DW_OP_breg5:
30634 case DW_OP_breg6:
30635 case DW_OP_breg7:
30636 case DW_OP_breg8:
30637 case DW_OP_breg9:
30638 case DW_OP_breg10:
30639 case DW_OP_breg11:
30640 case DW_OP_breg12:
30641 case DW_OP_breg13:
30642 case DW_OP_breg14:
30643 case DW_OP_breg15:
30644 case DW_OP_breg16:
30645 case DW_OP_breg17:
30646 case DW_OP_breg18:
30647 case DW_OP_breg19:
30648 case DW_OP_breg20:
30649 case DW_OP_breg21:
30650 case DW_OP_breg22:
30651 case DW_OP_breg23:
30652 case DW_OP_breg24:
30653 case DW_OP_breg25:
30654 case DW_OP_breg26:
30655 case DW_OP_breg27:
30656 case DW_OP_breg28:
30657 case DW_OP_breg29:
30658 case DW_OP_breg30:
30659 case DW_OP_breg31:
30660 case DW_OP_regx:
30661 case DW_OP_fbreg:
30662 case DW_OP_piece:
30663 case DW_OP_deref_size:
30664 case DW_OP_xderef_size:
30665 return valx1->v.val_int == valy1->v.val_int;
30666 case DW_OP_skip:
30667 case DW_OP_bra:
30668 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30669 can cause irrelevant differences in dw_loc_addr. */
30670 gcc_assert (valx1->val_class == dw_val_class_loc
30671 && valy1->val_class == dw_val_class_loc
30672 && (dwarf_split_debug_info
30673 || x->dw_loc_addr == y->dw_loc_addr));
30674 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30675 case DW_OP_implicit_value:
30676 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30677 || valx2->val_class != valy2->val_class)
30678 return false;
30679 switch (valx2->val_class)
30680 {
30681 case dw_val_class_const:
30682 return valx2->v.val_int == valy2->v.val_int;
30683 case dw_val_class_vec:
30684 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30685 && valx2->v.val_vec.length == valy2->v.val_vec.length
30686 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30687 valx2->v.val_vec.elt_size
30688 * valx2->v.val_vec.length) == 0;
30689 case dw_val_class_const_double:
30690 return valx2->v.val_double.low == valy2->v.val_double.low
30691 && valx2->v.val_double.high == valy2->v.val_double.high;
30692 case dw_val_class_wide_int:
30693 return *valx2->v.val_wide == *valy2->v.val_wide;
30694 case dw_val_class_addr:
30695 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
30696 default:
30697 gcc_unreachable ();
30698 }
30699 case DW_OP_bregx:
30700 case DW_OP_bit_piece:
30701 return valx1->v.val_int == valy1->v.val_int
30702 && valx2->v.val_int == valy2->v.val_int;
30703 case DW_OP_addr:
30704 hash_addr:
30705 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
30706 case DW_OP_GNU_addr_index:
30707 case DW_OP_GNU_const_index:
30708 {
30709 rtx ax1 = valx1->val_entry->addr.rtl;
30710 rtx ay1 = valy1->val_entry->addr.rtl;
30711 return rtx_equal_p (ax1, ay1);
30712 }
30713 case DW_OP_implicit_pointer:
30714 case DW_OP_GNU_implicit_pointer:
30715 return valx1->val_class == dw_val_class_die_ref
30716 && valx1->val_class == valy1->val_class
30717 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
30718 && valx2->v.val_int == valy2->v.val_int;
30719 case DW_OP_entry_value:
30720 case DW_OP_GNU_entry_value:
30721 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
30722 case DW_OP_const_type:
30723 case DW_OP_GNU_const_type:
30724 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
30725 || valx2->val_class != valy2->val_class)
30726 return false;
30727 switch (valx2->val_class)
30728 {
30729 case dw_val_class_const:
30730 return valx2->v.val_int == valy2->v.val_int;
30731 case dw_val_class_vec:
30732 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30733 && valx2->v.val_vec.length == valy2->v.val_vec.length
30734 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30735 valx2->v.val_vec.elt_size
30736 * valx2->v.val_vec.length) == 0;
30737 case dw_val_class_const_double:
30738 return valx2->v.val_double.low == valy2->v.val_double.low
30739 && valx2->v.val_double.high == valy2->v.val_double.high;
30740 case dw_val_class_wide_int:
30741 return *valx2->v.val_wide == *valy2->v.val_wide;
30742 default:
30743 gcc_unreachable ();
30744 }
30745 case DW_OP_regval_type:
30746 case DW_OP_deref_type:
30747 case DW_OP_GNU_regval_type:
30748 case DW_OP_GNU_deref_type:
30749 return valx1->v.val_int == valy1->v.val_int
30750 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
30751 case DW_OP_convert:
30752 case DW_OP_reinterpret:
30753 case DW_OP_GNU_convert:
30754 case DW_OP_GNU_reinterpret:
30755 if (valx1->val_class != valy1->val_class)
30756 return false;
30757 if (valx1->val_class == dw_val_class_unsigned_const)
30758 return valx1->v.val_unsigned == valy1->v.val_unsigned;
30759 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30760 case DW_OP_GNU_parameter_ref:
30761 return valx1->val_class == dw_val_class_die_ref
30762 && valx1->val_class == valy1->val_class
30763 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30764 default:
30765 /* Other codes have no operands. */
30766 return true;
30767 }
30768 }
30769
30770 /* Return true if DWARF location expressions X and Y are the same. */
30771
30772 static inline bool
30773 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
30774 {
30775 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
30776 if (x->dw_loc_opc != y->dw_loc_opc
30777 || x->dtprel != y->dtprel
30778 || !compare_loc_operands (x, y))
30779 break;
30780 return x == NULL && y == NULL;
30781 }
30782
30783 /* Hashtable helpers. */
30784
30785 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
30786 {
30787 static inline hashval_t hash (const dw_loc_list_struct *);
30788 static inline bool equal (const dw_loc_list_struct *,
30789 const dw_loc_list_struct *);
30790 };
30791
30792 /* Return precomputed hash of location list X. */
30793
30794 inline hashval_t
30795 loc_list_hasher::hash (const dw_loc_list_struct *x)
30796 {
30797 return x->hash;
30798 }
30799
30800 /* Return true if location lists A and B are the same. */
30801
30802 inline bool
30803 loc_list_hasher::equal (const dw_loc_list_struct *a,
30804 const dw_loc_list_struct *b)
30805 {
30806 if (a == b)
30807 return 1;
30808 if (a->hash != b->hash)
30809 return 0;
30810 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
30811 if (strcmp (a->begin, b->begin) != 0
30812 || strcmp (a->end, b->end) != 0
30813 || (a->section == NULL) != (b->section == NULL)
30814 || (a->section && strcmp (a->section, b->section) != 0)
30815 || a->vbegin != b->vbegin || a->vend != b->vend
30816 || !compare_locs (a->expr, b->expr))
30817 break;
30818 return a == NULL && b == NULL;
30819 }
30820
30821 typedef hash_table<loc_list_hasher> loc_list_hash_type;
30822
30823
30824 /* Recursively optimize location lists referenced from DIE
30825 children and share them whenever possible. */
30826
30827 static void
30828 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
30829 {
30830 dw_die_ref c;
30831 dw_attr_node *a;
30832 unsigned ix;
30833 dw_loc_list_struct **slot;
30834 bool drop_locviews = false;
30835 bool has_locviews = false;
30836
30837 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30838 if (AT_class (a) == dw_val_class_loc_list)
30839 {
30840 dw_loc_list_ref list = AT_loc_list (a);
30841 /* TODO: perform some optimizations here, before hashing
30842 it and storing into the hash table. */
30843 hash_loc_list (list);
30844 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
30845 if (*slot == NULL)
30846 {
30847 *slot = list;
30848 if (loc_list_has_views (list))
30849 gcc_assert (list->vl_symbol);
30850 else if (list->vl_symbol)
30851 {
30852 drop_locviews = true;
30853 list->vl_symbol = NULL;
30854 }
30855 }
30856 else
30857 {
30858 if (list->vl_symbol && !(*slot)->vl_symbol)
30859 drop_locviews = true;
30860 a->dw_attr_val.v.val_loc_list = *slot;
30861 }
30862 }
30863 else if (AT_class (a) == dw_val_class_view_list)
30864 {
30865 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30866 has_locviews = true;
30867 }
30868
30869
30870 if (drop_locviews && has_locviews)
30871 remove_AT (die, DW_AT_GNU_locviews);
30872
30873 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
30874 }
30875
30876
30877 /* Recursively assign each location list a unique index into the debug_addr
30878 section. */
30879
30880 static void
30881 index_location_lists (dw_die_ref die)
30882 {
30883 dw_die_ref c;
30884 dw_attr_node *a;
30885 unsigned ix;
30886
30887 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30888 if (AT_class (a) == dw_val_class_loc_list)
30889 {
30890 dw_loc_list_ref list = AT_loc_list (a);
30891 dw_loc_list_ref curr;
30892 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
30893 {
30894 /* Don't index an entry that has already been indexed
30895 or won't be output. Make sure skip_loc_list_entry doesn't
30896 call size_of_locs, because that could create a circular
30897 dependency: index_location_lists requires the address table
30898 indexes to be computed, but adding new indexes through
30899 add_addr_table_entry requires that the index computation make
30900 no new additions to the hash table.  In the rare case of a
30901 DWARF[234] location expression of 64KB or more, we'll just
30902 waste an unused address table entry for it.  */
30903 if (curr->begin_entry != NULL
30904 || skip_loc_list_entry (curr))
30905 continue;
30906
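/* Add the begin label to the address table now; the actual index is
   assigned later, when the table is traversed with index_addr_table_entry.  */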
30907 curr->begin_entry
30908 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
30909 }
30910 }
30911
30912 FOR_EACH_CHILD (die, c, index_location_lists (c));
30913 }
30914
30915 /* Optimize location lists referenced from DIE
30916 children and share them whenever possible. */
30917
30918 static void
30919 optimize_location_lists (dw_die_ref die)
30920 {
30921 loc_list_hash_type htab (500);
30922 optimize_location_lists_1 (die, &htab);
30923 }
30924 \f
30925 /* Traverse the limbo die list, and add parent/child links. The only
30926 dies without parents that should be here are concrete instances of
30927 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
30928 For concrete instances, we can get the parent die from the abstract
30929 instance. */
30930
30931 static void
30932 flush_limbo_die_list (void)
30933 {
30934 limbo_die_node *node;
30935
30936 /* get_context_die calls force_decl_die, which can put new DIEs on the
30937 limbo list in LTO mode when nested functions are put in a different
30938 partition than that of their parent function. */
30939 while ((node = limbo_die_list))
30940 {
30941 dw_die_ref die = node->die;
30942 limbo_die_list = node->next;
30943
30944 if (die->die_parent == NULL)
30945 {
30946 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
30947
30948 if (origin && origin->die_parent)
30949 add_child_die (origin->die_parent, die);
30950 else if (is_cu_die (die))
30951 ;
30952 else if (seen_error ())
30953 /* It's OK to be confused by errors in the input. */
30954 add_child_die (comp_unit_die (), die);
30955 else
30956 {
30957 /* In certain situations, the lexical block containing a
30958 nested function can be optimized away, which results
30959 in the nested function die being orphaned. Likewise
30960 with the return type of that nested function. Force
30961 this to be a child of the containing function.
30962
30963 It may happen that even the containing function was fully
30964 inlined and optimized out.  In that case we are lost and
30965 fall back to attaching the DIE to the compile unit.  This
30966 should not be a big issue as the function is likely unreachable too.  */
30967 gcc_assert (node->created_for);
30968
30969 if (DECL_P (node->created_for))
30970 origin = get_context_die (DECL_CONTEXT (node->created_for));
30971 else if (TYPE_P (node->created_for))
30972 origin = scope_die_for (node->created_for, comp_unit_die ());
30973 else
30974 origin = comp_unit_die ();
30975
30976 add_child_die (origin, die);
30977 }
30978 }
30979 }
30980 }
30981
30982 /* Reset DIEs so we can output them again. */
30983
30984 static void
30985 reset_dies (dw_die_ref die)
30986 {
30987 dw_die_ref c;
30988
30989 /* Remove stuff we re-generate. */
30990 die->die_mark = 0;
30991 die->die_offset = 0;
30992 die->die_abbrev = 0;
30993 remove_AT (die, DW_AT_sibling);
30994
30995 FOR_EACH_CHILD (die, c, reset_dies (c));
30996 }
30997
30998 /* Output stuff that dwarf requires at the end of every file,
30999 and generate the DWARF-2 debugging info. */
31000
31001 static void
31002 dwarf2out_finish (const char *)
31003 {
31004 comdat_type_node *ctnode;
31005 dw_die_ref main_comp_unit_die;
31006 unsigned char checksum[16];
31007 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31008
31009 /* Flush out any latecomers to the limbo party. */
31010 flush_limbo_die_list ();
31011
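/* Any inline entry data must have been consumed by now; the table, if it
   exists, is expected to be empty.  */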
31012 if (inline_entry_data_table)
31013 gcc_assert (inline_entry_data_table->elements () == 0);
31014
31015 if (flag_checking)
31016 {
31017 verify_die (comp_unit_die ());
31018 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31019 verify_die (node->die);
31020 }
31021
31022 /* We shouldn't have any symbols with delayed asm names for
31023 DIEs generated after early finish. */
31024 gcc_assert (deferred_asm_name == NULL);
31025
31026 gen_remaining_tmpl_value_param_die_attribute ();
31027
31028 if (flag_generate_lto || flag_generate_offload)
31029 {
31030 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31031
31032 /* Prune stuff so that dwarf2out_finish runs successfully
31033 for the fat part of the object. */
31034 reset_dies (comp_unit_die ());
31035 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31036 reset_dies (node->die);
31037
31038 hash_table<comdat_type_hasher> comdat_type_table (100);
31039 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31040 {
31041 comdat_type_node **slot
31042 = comdat_type_table.find_slot (ctnode, INSERT);
31043
31044 /* Don't reset types twice. */
31045 if (*slot != HTAB_EMPTY_ENTRY)
31046 continue;
31047
31048 /* Reset the DIEs of this type unit as well, so they can be
31049 output again for the fat part of the object.  */
31051 if (debug_info_level >= DINFO_LEVEL_TERSE)
31052 reset_dies (ctnode->root_die);
31053
31054 *slot = ctnode;
31055 }
31056
31057 /* Reset die CU symbol so we don't output it twice. */
31058 comp_unit_die ()->die_id.die_symbol = NULL;
31059
31060 /* Remove DW_AT_macro from the early output. */
31061 if (have_macinfo)
31062 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31063
31064 /* Remove indirect string decisions. */
31065 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31066 }
31067
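/* With assert checking enabled, verify that no child of the compile unit
   DIE is still marked from an earlier traversal.  */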
31068 #if ENABLE_ASSERT_CHECKING
31069 {
31070 dw_die_ref die = comp_unit_die (), c;
31071 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31072 }
31073 #endif
31074 resolve_addr (comp_unit_die ());
31075 move_marked_base_types ();
31076
31077 /* Initialize sections and labels used for actual assembler output. */
31078 unsigned generation = init_sections_and_labels (false);
31079
31080 /* Traverse the DIE's and add sibling attributes to those DIE's that
31081 have children. */
31082 add_sibling_attributes (comp_unit_die ());
31083 limbo_die_node *node;
31084 for (node = cu_die_list; node; node = node->next)
31085 add_sibling_attributes (node->die);
31086 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31087 add_sibling_attributes (ctnode->root_die);
31088
31089 /* When splitting DWARF info, we put some attributes in the
31090 skeleton compile_unit DIE that remains in the .o, while
31091 most attributes go in the DWO compile_unit_die. */
31092 if (dwarf_split_debug_info)
31093 {
31094 limbo_die_node *cu;
31095 main_comp_unit_die = gen_compile_unit_die (NULL);
31096 if (dwarf_version >= 5)
31097 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
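/* gen_compile_unit_die put the new skeleton CU on the limbo list; move
   that node over to the CU list so the unit is output with the others.  */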
31098 cu = limbo_die_list;
31099 gcc_assert (cu->die == main_comp_unit_die);
31100 limbo_die_list = limbo_die_list->next;
31101 cu->next = cu_die_list;
31102 cu_die_list = cu;
31103 }
31104 else
31105 main_comp_unit_die = comp_unit_die ();
31106
31107 /* Output a terminator label for the .text section. */
31108 switch_to_section (text_section);
31109 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31110 if (cold_text_section)
31111 {
31112 switch_to_section (cold_text_section);
31113 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31114 }
31115
31116 /* We can only use the low/high_pc attributes if all of the code was
31117 in .text. */
31118 if (!have_multiple_function_sections
31119 || (dwarf_version < 3 && dwarf_strict))
31120 {
31121 /* Don't add if the CU has no associated code. */
31122 if (text_section_used)
31123 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31124 text_end_label, true);
31125 }
31126 else
31127 {
31128 unsigned fde_idx;
31129 dw_fde_ref fde;
31130 bool range_list_added = false;
31131
31132 if (text_section_used)
31133 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31134 text_end_label, &range_list_added, true);
31135 if (cold_text_section_used)
31136 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31137 cold_end_label, &range_list_added, true);
31138
31139 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31140 {
31141 if (DECL_IGNORED_P (fde->decl))
31142 continue;
31143 if (!fde->in_std_section)
31144 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31145 fde->dw_fde_end, &range_list_added,
31146 true);
31147 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31148 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31149 fde->dw_fde_second_end, &range_list_added,
31150 true);
31151 }
31152
31153 if (range_list_added)
31154 {
31155 /* We need to give .debug_loc and .debug_ranges an appropriate
31156 "base address". Use zero so that these addresses become
31157 absolute. Historically, we've emitted the unexpected
31158 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31159 Emit both to give time for other tools to adapt. */
31160 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31161 if (! dwarf_strict && dwarf_version < 4)
31162 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31163
31164 add_ranges (NULL);
31165 }
31166 }
31167
31168 /* AIX Assembler inserts the length, so adjust the reference to match the
31169 offset expected by debuggers. */
31170 strcpy (dl_section_ref, debug_line_section_label);
31171 if (XCOFF_DEBUGGING_INFO)
31172 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31173
31174 if (debug_info_level >= DINFO_LEVEL_TERSE)
31175 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31176 dl_section_ref);
31177
31178 if (have_macinfo)
31179 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31180 macinfo_section_label);
31181
31182 if (dwarf_split_debug_info)
31183 {
31184 if (have_location_lists)
31185 {
31186 if (dwarf_version >= 5)
31187 add_AT_loclistsptr (comp_unit_die (), DW_AT_loclists_base,
31188 loc_section_label);
31189 /* optimize_location_lists calculates the size of the lists,
31190 so index them first, and assign indices to the entries.
31191 Although optimize_location_lists will remove entries from
31192 the table, it only does so for duplicates, and therefore
31193 only reduces ref_counts to 1. */
31194 index_location_lists (comp_unit_die ());
31195 }
31196
31197 if (addr_index_table != NULL)
31198 {
31199 unsigned int index = 0;
31200 addr_index_table
31201 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31202 (&index);
31203 }
31204 }
31205
31206 loc_list_idx = 0;
31207 if (have_location_lists)
31208 {
31209 optimize_location_lists (comp_unit_die ());
31210 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31211 if (dwarf_version >= 5 && dwarf_split_debug_info)
31212 assign_location_list_indexes (comp_unit_die ());
31213 }
31214
31215 save_macinfo_strings ();
31216
31217 if (dwarf_split_debug_info)
31218 {
31219 unsigned int index = 0;
31220
31221 /* Add attributes common to skeleton compile_units and
31222 type_units. Because these attributes include strings, it
31223 must be done before freezing the string table. Top-level
31224 skeleton die attrs are added when the skeleton type unit is
31225 created, so ensure it is created by this point. */
31226 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31227 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31228 }
31229
31230 /* Output all of the compilation units. We put the main one last so that
31231 the offsets are available to output_pubnames. */
31232 for (node = cu_die_list; node; node = node->next)
31233 output_comp_unit (node->die, 0, NULL);
31234
31235 hash_table<comdat_type_hasher> comdat_type_table (100);
31236 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31237 {
31238 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31239
31240 /* Don't output duplicate types. */
31241 if (*slot != HTAB_EMPTY_ENTRY)
31242 continue;
31243
31244 /* Add a pointer to the line table for the main compilation unit
31245 so that the debugger can make sense of DW_AT_decl_file
31246 attributes. */
31247 if (debug_info_level >= DINFO_LEVEL_TERSE)
31248 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31249 (!dwarf_split_debug_info
31250 ? dl_section_ref
31251 : debug_skeleton_line_section_label));
31252
31253 output_comdat_type_unit (ctnode);
31254 *slot = ctnode;
31255 }
31256
31257 if (dwarf_split_debug_info)
31258 {
31259 int mark;
31260 struct md5_ctx ctx;
31261
31262 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31263 index_rnglists ();
31264
31265 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31266 md5_init_ctx (&ctx);
31267 mark = 0;
31268 die_checksum (comp_unit_die (), &ctx, &mark);
31269 unmark_all_dies (comp_unit_die ());
31270 md5_finish_ctx (&ctx, checksum);
31271
31272 if (dwarf_version < 5)
31273 {
31274 /* Use the first 8 bytes of the checksum as the dwo_id,
31275 and add it to both comp-unit DIEs. */
31276 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31277 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31278 }
31279
31280 /* Add the base offset of the ranges table to the skeleton
31281 comp-unit DIE. */
31282 if (!vec_safe_is_empty (ranges_table))
31283 {
31284 if (dwarf_version >= 5)
31285 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31286 ranges_base_label);
31287 else
31288 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31289 ranges_section_label);
31290 }
31291
31292 switch_to_section (debug_addr_section);
31293 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
31294 output_addr_table ();
31295 }
31296
31297 /* Output the main compilation unit if non-empty or if .debug_macinfo
31298 or .debug_macro will be emitted. */
31299 output_comp_unit (comp_unit_die (), have_macinfo,
31300 dwarf_split_debug_info ? checksum : NULL);
31301
31302 if (dwarf_split_debug_info && info_section_emitted)
31303 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31304
31305 /* Output the abbreviation table. */
31306 if (vec_safe_length (abbrev_die_table) != 1)
31307 {
31308 switch_to_section (debug_abbrev_section);
31309 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31310 output_abbrev_section ();
31311 }
31312
31313 /* Output location list section if necessary. */
31314 if (have_location_lists)
31315 {
31316 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31317 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31318 /* Output the location lists info. */
31319 switch_to_section (debug_loc_section);
31320 if (dwarf_version >= 5)
31321 {
31322 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 1);
31323 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 2);
31324 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31325 dw2_asm_output_data (4, 0xffffffff,
31326 "Initial length escape value indicating "
31327 "64-bit DWARF extension");
31328 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31329 "Length of Location Lists");
31330 ASM_OUTPUT_LABEL (asm_out_file, l1);
31331 output_dwarf_version ();
31332 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31333 dw2_asm_output_data (1, 0, "Segment Size");
31334 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31335 "Offset Entry Count");
31336 }
31337 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31338 if (dwarf_version >= 5 && dwarf_split_debug_info)
31339 {
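/* Walk the lists again to emit the offset entry table; this walk must
   assign exactly as many indexes as the earlier one, which the assert
   below checks.  */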
31340 unsigned int save_loc_list_idx = loc_list_idx;
31341 loc_list_idx = 0;
31342 output_loclists_offsets (comp_unit_die ());
31343 gcc_assert (save_loc_list_idx == loc_list_idx);
31344 }
31345 output_location_lists (comp_unit_die ());
31346 if (dwarf_version >= 5)
31347 ASM_OUTPUT_LABEL (asm_out_file, l2);
31348 }
31349
31350 output_pubtables ();
31351
31352 /* Output the address range information if a CU (.debug_info section)
31353 was emitted. We output an empty table even if we had no functions
31354 to put in it.  This is because the consumer has no way to tell the
31355 difference between an empty table that we omitted and failure to
31356 generate a table that would have contained data. */
31357 if (info_section_emitted)
31358 {
31359 switch_to_section (debug_aranges_section);
31360 output_aranges ();
31361 }
31362
31363 /* Output ranges section if necessary. */
31364 if (!vec_safe_is_empty (ranges_table))
31365 {
31366 if (dwarf_version >= 5)
31367 output_rnglists (generation);
31368 else
31369 output_ranges ();
31370 }
31371
31372 /* Have to end the macro section. */
31373 if (have_macinfo)
31374 {
31375 switch_to_section (debug_macinfo_section);
31376 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31377 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31378 : debug_skeleton_line_section_label, false);
31379 dw2_asm_output_data (1, 0, "End compilation unit");
31380 }
31381
31382 /* Output the source line correspondence table. We must do this
31383 even if there is no line information. Otherwise, on an empty
31384 translation unit, we will generate a present, but empty,
31385 .debug_info section. IRIX 6.5 `nm' will then complain when
31386 examining the file. This is done late so that any filenames
31387 used by the debug_info section are marked as 'used'. */
31388 switch_to_section (debug_line_section);
31389 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31390 if (! output_asm_line_debug_info ())
31391 output_line_info (false);
31392
31393 if (dwarf_split_debug_info && info_section_emitted)
31394 {
31395 switch_to_section (debug_skeleton_line_section);
31396 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31397 output_line_info (true);
31398 }
31399
31400 /* If we emitted any indirect strings, output the string table too. */
31401 if (debug_str_hash || skeleton_debug_str_hash)
31402 output_indirect_strings ();
31403 if (debug_line_str_hash)
31404 {
31405 switch_to_section (debug_line_str_section);
31406 const enum dwarf_form form = DW_FORM_line_strp;
31407 debug_line_str_hash->traverse<enum dwarf_form,
31408 output_indirect_string> (form);
31409 }
31410
31411 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31412 symview_upper_bound = 0;
31413 if (zero_view_p)
31414 bitmap_clear (zero_view_p);
31415 }
31416
31417 /* Returns a hash value for X (which really is a variable_value_struct). */
31418
31419 inline hashval_t
31420 variable_value_hasher::hash (variable_value_struct *x)
31421 {
31422 return (hashval_t) x->decl_id;
31423 }
31424
31425 /* Return nonzero if decl_id of variable_value_struct X is the same as
31426 UID of decl Y. */
31427
31428 inline bool
31429 variable_value_hasher::equal (variable_value_struct *x, tree y)
31430 {
31431 return x->decl_id == DECL_UID (y);
31432 }
31433
31434 /* Helper function for resolve_variable_value, handle
31435 DW_OP_GNU_variable_value in one location expression.
31436 Return true if exprloc has been changed into loclist. */
31437
31438 static bool
31439 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31440 {
31441 dw_loc_descr_ref next;
31442 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31443 {
31444 next = loc->dw_loc_next;
31445 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31446 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31447 continue;
31448
31449 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31450 if (DECL_CONTEXT (decl) != current_function_decl)
31451 continue;
31452
31453 dw_die_ref ref = lookup_decl_die (decl);
31454 if (ref)
31455 {
31456 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31457 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31458 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31459 continue;
31460 }
31461 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31462 if (l == NULL)
31463 continue;
31464 if (l->dw_loc_next)
31465 {
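/* The resolved location is a list with more than one entry, so it cannot
   simply be spliced into this expression.  Either turn the whole attribute
   into a location list (for attributes that allow it), or emit a
   DW_TAG_variable DIE that can be referred to instead.  */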
31466 if (AT_class (a) != dw_val_class_loc)
31467 continue;
31468 switch (a->dw_attr)
31469 {
31470 /* Following attributes allow both exprloc and loclist
31471 classes, so we can change them into a loclist. */
31472 case DW_AT_location:
31473 case DW_AT_string_length:
31474 case DW_AT_return_addr:
31475 case DW_AT_data_member_location:
31476 case DW_AT_frame_base:
31477 case DW_AT_segment:
31478 case DW_AT_static_link:
31479 case DW_AT_use_location:
31480 case DW_AT_vtable_elem_location:
31481 if (prev)
31482 {
31483 prev->dw_loc_next = NULL;
31484 prepend_loc_descr_to_each (l, AT_loc (a));
31485 }
31486 if (next)
31487 add_loc_descr_to_each (l, next);
31488 a->dw_attr_val.val_class = dw_val_class_loc_list;
31489 a->dw_attr_val.val_entry = NULL;
31490 a->dw_attr_val.v.val_loc_list = l;
31491 have_location_lists = true;
31492 return true;
31493 /* Following attributes allow both exprloc and reference,
31494 so if the whole expression is DW_OP_GNU_variable_value alone
31495 we could transform it into reference. */
31496 case DW_AT_byte_size:
31497 case DW_AT_bit_size:
31498 case DW_AT_lower_bound:
31499 case DW_AT_upper_bound:
31500 case DW_AT_bit_stride:
31501 case DW_AT_count:
31502 case DW_AT_allocated:
31503 case DW_AT_associated:
31504 case DW_AT_byte_stride:
31505 if (prev == NULL && next == NULL)
31506 break;
31507 /* FALLTHRU */
31508 default:
31509 if (dwarf_strict)
31510 continue;
31511 break;
31512 }
31513 /* Create DW_TAG_variable that we can refer to. */
31514 gen_decl_die (decl, NULL_TREE, NULL,
31515 lookup_decl_die (current_function_decl));
31516 ref = lookup_decl_die (decl);
31517 if (ref)
31518 {
31519 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31520 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31521 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31522 }
31523 continue;
31524 }
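/* A single-entry location list: splice its expression in place of the
   DW_OP_GNU_variable_value operation.  */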
31525 if (prev)
31526 {
31527 prev->dw_loc_next = l->expr;
31528 add_loc_descr (&prev->dw_loc_next, next);
31529 free_loc_descr (loc, NULL);
31530 next = prev->dw_loc_next;
31531 }
31532 else
31533 {
31534 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31535 add_loc_descr (&loc, next);
31536 next = loc;
31537 }
31538 loc = prev;
31539 }
31540 return false;
31541 }
31542
31543 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31544
31545 static void
31546 resolve_variable_value (dw_die_ref die)
31547 {
31548 dw_attr_node *a;
31549 dw_loc_list_ref loc;
31550 unsigned ix;
31551
31552 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31553 switch (AT_class (a))
31554 {
31555 case dw_val_class_loc:
31556 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31557 break;
31558 /* FALLTHRU */
31559 case dw_val_class_loc_list:
31560 loc = AT_loc_list (a);
31561 gcc_assert (loc);
31562 for (; loc; loc = loc->dw_loc_next)
31563 resolve_variable_value_in_expr (a, loc->expr);
31564 break;
31565 default:
31566 break;
31567 }
31568 }
31569
31570 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31571 temporaries in the current function. */
31572
31573 static void
31574 resolve_variable_values (void)
31575 {
31576 if (!variable_value_hash || !current_function_decl)
31577 return;
31578
31579 struct variable_value_struct *node
31580 = variable_value_hash->find_with_hash (current_function_decl,
31581 DECL_UID (current_function_decl));
31582
31583 if (node == NULL)
31584 return;
31585
31586 unsigned int i;
31587 dw_die_ref die;
31588 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31589 resolve_variable_value (die);
31590 }
31591
31592 /* Helper function for note_variable_value, handle one location
31593 expression. */
31594
31595 static void
31596 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31597 {
31598 for (; loc; loc = loc->dw_loc_next)
31599 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31600 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31601 {
31602 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31603 dw_die_ref ref = lookup_decl_die (decl);
31604 if (! ref && (flag_generate_lto || flag_generate_offload))
31605 {
31606 /* ??? This is somewhat of a hack: we do not create DIEs early
31607 for variables that are not in BLOCK trees, but when generating
31608 early LTO output we need the dw_val_class_decl_ref to be
31609 fully resolved.  For fat LTO objects we'd also like to
31610 undo this after the LTO dwarf output.  */
31611 gcc_assert (DECL_CONTEXT (decl));
31612 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31613 gcc_assert (ctx != NULL);
31614 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31615 ref = lookup_decl_die (decl);
31616 gcc_assert (ref != NULL);
31617 }
31618 if (ref)
31619 {
31620 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31621 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31622 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31623 continue;
31624 }
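/* No DIE for the referenced variable yet.  If it lives in a function that
   already has a DIE, remember the referencing DIE so that
   resolve_variable_values can retry when that function is processed.  */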
31625 if (VAR_P (decl)
31626 && DECL_CONTEXT (decl)
31627 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31628 && lookup_decl_die (DECL_CONTEXT (decl)))
31629 {
31630 if (!variable_value_hash)
31631 variable_value_hash
31632 = hash_table<variable_value_hasher>::create_ggc (10);
31633
31634 tree fndecl = DECL_CONTEXT (decl);
31635 struct variable_value_struct *node;
31636 struct variable_value_struct **slot
31637 = variable_value_hash->find_slot_with_hash (fndecl,
31638 DECL_UID (fndecl),
31639 INSERT);
31640 if (*slot == NULL)
31641 {
31642 node = ggc_cleared_alloc<variable_value_struct> ();
31643 node->decl_id = DECL_UID (fndecl);
31644 *slot = node;
31645 }
31646 else
31647 node = *slot;
31648
31649 vec_safe_push (node->dies, die);
31650 }
31651 }
31652 }
31653
31654 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
31655 with dw_val_class_decl_ref operand. */
31656
31657 static void
31658 note_variable_value (dw_die_ref die)
31659 {
31660 dw_die_ref c;
31661 dw_attr_node *a;
31662 dw_loc_list_ref loc;
31663 unsigned ix;
31664
31665 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31666 switch (AT_class (a))
31667 {
31668 case dw_val_class_loc_list:
31669 loc = AT_loc_list (a);
31670 gcc_assert (loc);
31671 if (!loc->noted_variable_value)
31672 {
31673 loc->noted_variable_value = 1;
31674 for (; loc; loc = loc->dw_loc_next)
31675 note_variable_value_in_expr (die, loc->expr);
31676 }
31677 break;
31678 case dw_val_class_loc:
31679 note_variable_value_in_expr (die, AT_loc (a));
31680 break;
31681 default:
31682 break;
31683 }
31684
31685 /* Mark children. */
31686 FOR_EACH_CHILD (die, c, note_variable_value (c));
31687 }
31688
31689 /* Perform any cleanups needed after the early debug generation pass
31690 has run. */
31691
31692 static void
31693 dwarf2out_early_finish (const char *filename)
31694 {
31695 set_early_dwarf s;
31696
31697 /* PCH might result in the DW_AT_producer string being restored from
31698 the header compilation, so always fill it with an empty string
31699 initially and overwrite it only here.  */
31700 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
31701 producer_string = gen_producer_string ();
31702 producer->dw_attr_val.v.val_str->refcount--;
31703 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
31704
31705 /* Add the name for the main input file now. We delayed this from
31706 dwarf2out_init to avoid complications with PCH. */
31707 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
31708 add_comp_dir_attribute (comp_unit_die ());
31709
31710 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
31711 DW_AT_comp_dir into .debug_line_str section. */
31712 if (!dwarf2out_as_loc_support
31713 && dwarf_version >= 5
31714 && DWARF5_USE_DEBUG_LINE_STR)
31715 {
31716 for (int i = 0; i < 2; i++)
31717 {
31718 dw_attr_node *a = get_AT (comp_unit_die (),
31719 i ? DW_AT_comp_dir : DW_AT_name);
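/* Skip a missing or non-string attribute, and strings so short that the
   .debug_line_str indirection would not save any space.  */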
31720 if (a == NULL
31721 || AT_class (a) != dw_val_class_str
31722 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
31723 continue;
31724
31725 if (! debug_line_str_hash)
31726 debug_line_str_hash
31727 = hash_table<indirect_string_hasher>::create_ggc (10);
31728
31729 struct indirect_string_node *node
31730 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
31731 set_indirect_string (node);
31732 node->form = DW_FORM_line_strp;
31733 a->dw_attr_val.v.val_str->refcount--;
31734 a->dw_attr_val.v.val_str = node;
31735 }
31736 }
31737
31738 /* With LTO early dwarf was really finished at compile-time, so make
31739 sure to adjust the phase after annotating the LTRANS CU DIE. */
31740 if (in_lto_p)
31741 {
31742 early_dwarf_finished = true;
31743 return;
31744 }
31745
31746 /* Walk through the list of incomplete types again, trying once more to
31747 emit full debugging info for them. */
31748 retry_incomplete_types ();
31749
31750 /* The point here is to flush out the limbo list so that it is empty
31751 and we don't need to stream it for LTO. */
31752 flush_limbo_die_list ();
31753
31754 gen_scheduled_generic_parms_dies ();
31755 gen_remaining_tmpl_value_param_die_attribute ();
31756
31757 /* Add DW_AT_linkage_name for all deferred DIEs. */
31758 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
31759 {
31760 tree decl = node->created_for;
31761 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
31762 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
31763 ended up in deferred_asm_name before we knew it was
31764 constant and never written to disk. */
31765 && DECL_ASSEMBLER_NAME (decl))
31766 {
31767 add_linkage_attr (node->die, decl);
31768 move_linkage_attr (node->die);
31769 }
31770 }
31771 deferred_asm_name = NULL;
31772
31773 if (flag_eliminate_unused_debug_types)
31774 prune_unused_types ();
31775
31776 /* Generate separate COMDAT sections for type DIEs. */
31777 if (use_debug_types)
31778 {
31779 break_out_comdat_types (comp_unit_die ());
31780
31781 /* Each new type_unit DIE was added to the limbo die list when created.
31782 Since these have all been added to comdat_type_list, clear the
31783 limbo die list. */
31784 limbo_die_list = NULL;
31785
31786 /* For each new comdat type unit, copy declarations for incomplete
31787 types to make the new unit self-contained (i.e., no direct
31788 references to the main compile unit). */
31789 for (comdat_type_node *ctnode = comdat_type_list;
31790 ctnode != NULL; ctnode = ctnode->next)
31791 copy_decls_for_unworthy_types (ctnode->root_die);
31792 copy_decls_for_unworthy_types (comp_unit_die ());
31793
31794 /* In the process of copying declarations from one unit to another,
31795 we may have left some declarations behind that are no longer
31796 referenced. Prune them. */
31797 prune_unused_types ();
31798 }
31799
31800 /* Traverse the DIE's and note DIEs with DW_OP_GNU_variable_value still
31801 with dw_val_class_decl_ref operand. */
31802 note_variable_value (comp_unit_die ());
31803 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31804 note_variable_value (node->die);
31805 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
31806 ctnode = ctnode->next)
31807 note_variable_value (ctnode->root_die);
31808 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31809 note_variable_value (node->die);
31810
31811 /* The AT_pubnames attribute needs to go in all skeleton dies, including
31812 both the main_cu and all skeleton TUs. Making this call unconditional
31813 would end up either adding a second copy of the AT_pubnames attribute, or
31814 requiring a special case in add_top_level_skeleton_die_attrs. */
31815 if (!dwarf_split_debug_info)
31816 add_AT_pubnames (comp_unit_die ());
31817
31818 /* The early debug phase is now finished. */
31819 early_dwarf_finished = true;
31820
31821 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
31822 if (!flag_generate_lto && !flag_generate_offload)
31823 return;
31824
31825 /* Now that we are going to output for LTO, initialize sections and labels
31826 to the LTO variants.  We don't need a random-seed postfix like the other
31827 LTO sections, since linking the LTO debug sections into one in a partial
31828 link is fine.  */
31829 init_sections_and_labels (true);
31830
31831 /* The output below is modeled after dwarf2out_finish with all
31832 location related output removed and some LTO specific changes.
31833 Some refactoring might make both smaller and easier to match up. */
31834
31835 /* Traverse the DIE's and add sibling attributes to those DIE's
31836 that have children. */
31837 add_sibling_attributes (comp_unit_die ());
31838 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31839 add_sibling_attributes (node->die);
31840 for (comdat_type_node *ctnode = comdat_type_list;
31841 ctnode != NULL; ctnode = ctnode->next)
31842 add_sibling_attributes (ctnode->root_die);
31843
31844 if (have_macinfo)
31845 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31846 macinfo_section_label);
31847
31848 save_macinfo_strings ();
31849
31850 if (dwarf_split_debug_info)
31851 {
31852 unsigned int index = 0;
31853 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31854 }
31855
31856 /* Output all of the compilation units. We put the main one last so that
31857 the offsets are available to output_pubnames. */
31858 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31859 output_comp_unit (node->die, 0, NULL);
31860
31861 hash_table<comdat_type_hasher> comdat_type_table (100);
31862 for (comdat_type_node *ctnode = comdat_type_list;
31863 ctnode != NULL; ctnode = ctnode->next)
31864 {
31865 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31866
31867 /* Don't output duplicate types. */
31868 if (*slot != HTAB_EMPTY_ENTRY)
31869 continue;
31870
31871 /* Add a pointer to the line table for the main compilation unit
31872 so that the debugger can make sense of DW_AT_decl_file
31873 attributes. */
31874 if (debug_info_level >= DINFO_LEVEL_TERSE)
31875 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31876 (!dwarf_split_debug_info
31877 ? debug_line_section_label
31878 : debug_skeleton_line_section_label));
31879
31880 output_comdat_type_unit (ctnode);
31881 *slot = ctnode;
31882 }
31883
31884 /* Attach a unique symbol to the main debuginfo section.  */
31885 compute_comp_unit_symbol (comp_unit_die ());
31886
31887 /* Output the main compilation unit. We always need it if only for
31888 the CU symbol. */
31889 output_comp_unit (comp_unit_die (), true, NULL);
31890
31891 /* Output the abbreviation table. */
31892 if (vec_safe_length (abbrev_die_table) != 1)
31893 {
31894 switch_to_section (debug_abbrev_section);
31895 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31896 output_abbrev_section ();
31897 }
31898
31899 /* Have to end the macro section. */
31900 if (have_macinfo)
31901 {
31902 /* We have to save macinfo state if we need to output it again
31903 for the FAT part of the object. */
31904 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
31905 if (flag_fat_lto_objects)
31906 macinfo_table = macinfo_table->copy ();
31907
31908 switch_to_section (debug_macinfo_section);
31909 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31910 output_macinfo (debug_skeleton_line_section_label, true);
31911 dw2_asm_output_data (1, 0, "End compilation unit");
31912
31913 /* Emit a skeleton debug_line section. */
31914 switch_to_section (debug_skeleton_line_section);
31915 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31916 output_line_info (true);
31917
31918 if (flag_fat_lto_objects)
31919 {
31920 vec_free (macinfo_table);
31921 macinfo_table = saved_macinfo_table;
31922 }
31923 }
31924
31925
31926 /* If we emitted any indirect strings, output the string table too. */
31927 if (debug_str_hash || skeleton_debug_str_hash)
31928 output_indirect_strings ();
31929
31930 /* Switch back to the text section. */
31931 switch_to_section (text_section);
31932 }
31933
31934 /* Reset all state within dwarf2out.c so that we can rerun the compiler
31935 within the same process. For use by toplev::finalize. */
31936
31937 void
31938 dwarf2out_c_finalize (void)
31939 {
31940 last_var_location_insn = NULL;
31941 cached_next_real_insn = NULL;
31942 used_rtx_array = NULL;
31943 incomplete_types = NULL;
31944 decl_scope_table = NULL;
31945 debug_info_section = NULL;
31946 debug_skeleton_info_section = NULL;
31947 debug_abbrev_section = NULL;
31948 debug_skeleton_abbrev_section = NULL;
31949 debug_aranges_section = NULL;
31950 debug_addr_section = NULL;
31951 debug_macinfo_section = NULL;
31952 debug_line_section = NULL;
31953 debug_skeleton_line_section = NULL;
31954 debug_loc_section = NULL;
31955 debug_pubnames_section = NULL;
31956 debug_pubtypes_section = NULL;
31957 debug_str_section = NULL;
31958 debug_line_str_section = NULL;
31959 debug_str_dwo_section = NULL;
31960 debug_str_offsets_section = NULL;
31961 debug_ranges_section = NULL;
31962 debug_frame_section = NULL;
31963 fde_vec = NULL;
31964 debug_str_hash = NULL;
31965 debug_line_str_hash = NULL;
31966 skeleton_debug_str_hash = NULL;
31967 dw2_string_counter = 0;
31968 have_multiple_function_sections = false;
31969 text_section_used = false;
31970 cold_text_section_used = false;
31971 cold_text_section = NULL;
31972 current_unit_personality = NULL;
31973
31974 early_dwarf = false;
31975 early_dwarf_finished = false;
31976
31977 next_die_offset = 0;
31978 single_comp_unit_die = NULL;
31979 comdat_type_list = NULL;
31980 limbo_die_list = NULL;
31981 file_table = NULL;
31982 decl_die_table = NULL;
31983 common_block_die_table = NULL;
31984 decl_loc_table = NULL;
31985 call_arg_locations = NULL;
31986 call_arg_loc_last = NULL;
31987 call_site_count = -1;
31988 tail_call_site_count = -1;
31989 cached_dw_loc_list_table = NULL;
31990 abbrev_die_table = NULL;
31991 delete dwarf_proc_stack_usage_map;
31992 dwarf_proc_stack_usage_map = NULL;
31993 line_info_label_num = 0;
31994 cur_line_info_table = NULL;
31995 text_section_line_info = NULL;
31996 cold_text_section_line_info = NULL;
31997 separate_line_info = NULL;
31998 info_section_emitted = false;
31999 pubname_table = NULL;
32000 pubtype_table = NULL;
32001 macinfo_table = NULL;
32002 ranges_table = NULL;
32003 ranges_by_label = NULL;
32004 rnglist_idx = 0;
32005 have_location_lists = false;
32006 loclabel_num = 0;
32007 poc_label_num = 0;
32008 last_emitted_file = NULL;
32009 label_num = 0;
32010 tmpl_value_parm_die_table = NULL;
32011 generic_type_instances = NULL;
32012 frame_pointer_fb_offset = 0;
32013 frame_pointer_fb_offset_valid = false;
32014 base_types.release ();
32015 XDELETEVEC (producer_string);
32016 producer_string = NULL;
32017 }
32018
32019 #include "gt-dwarf2out.h"